From e49ee6d004a568bbdf29ccc35e62fcc132f33fef Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Mon, 24 Mar 2025 23:02:46 -0400 Subject: [PATCH 01/32] chore(scripts): add script to generate clone trajectory data Signed-off-by: Cameron Smith --- scripts/clone/clone_gen.py | 289 +++++++++++++++++++++++++++++++++++++ 1 file changed, 289 insertions(+) create mode 100644 scripts/clone/clone_gen.py diff --git a/scripts/clone/clone_gen.py b/scripts/clone/clone_gen.py new file mode 100644 index 000000000..e4c0cba1d --- /dev/null +++ b/scripts/clone/clone_gen.py @@ -0,0 +1,289 @@ +#!/usr/bin/env python +import logging +import os +from pathlib import Path + +import numpy as np +import rich_click as click +from anndata import AnnData +from beartype import beartype +from beartype.typing import Dict, List, Optional +from rich.console import Console +from rich.logging import RichHandler +from rich.theme import Theme + +from pyrovelocity.io.datasets import larry_mono, larry_neu +from pyrovelocity.plots._trajectory import get_clone_trajectory + +click.rich_click.SHOW_ARGUMENTS = True +click.rich_click.USE_MARKDOWN = True + + +def configure_logging(logger_name: str = "clone_gen") -> logging.Logger: + """Configure rich logging with custom theme.""" + console_theme = Theme( + { + "logging.level.info": "dim cyan", + "logging.level.warning": "magenta", + "logging.level.error": "bold red", + "logging.level.debug": "green", + } + ) + console = Console(theme=console_theme) + rich_handler = RichHandler( + console=console, + rich_tracebacks=True, + show_time=True, + show_level=True, + show_path=False, + markup=True, + log_time_format="[%X]", + ) + valid_log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] + log_level = os.getenv("LOG_LEVEL", "INFO").upper() + + if log_level not in valid_log_levels: + log_level = "INFO" + + logging.basicConfig( + level=log_level, + format="%(message)s", + datefmt="[%X]", + handlers=[rich_handler], + ) + logger = logging.getLogger(logger_name) + logger.setLevel(log_level) + return logger + + +logger = configure_logging() + + +@beartype +def generate_clone_trajectory( + adata: AnnData, + average_start_point: bool = True, + times: List[int] = [2, 4, 6], + clone_num: Optional[int] = None, + fix_nans: bool = True, +) -> AnnData: + """Generate clone trajectory data from AnnData object. + + Args: + adata: The input AnnData object + average_start_point: Whether to average the start point + times: List of time points to consider + clone_num: Maximum number of clones to process + fix_nans: Whether to replace NaN values with zeros + + Returns: + AnnData object with clone trajectory information + """ + logger.info(f"Generating clone trajectory for dataset with {adata.n_obs} cells") + adata_clone = get_clone_trajectory( + adata, average_start_point=average_start_point, + times=times, clone_num=clone_num + ) + + if fix_nans and "clone_vector_emb" in adata_clone.obsm: + nan_count = np.isnan(adata_clone.obsm["clone_vector_emb"]).sum() + if nan_count > 0: + logger.info(f"Fixing {nan_count} NaN values in clone_vector_emb") + adata_clone.obsm["clone_vector_emb"][ + np.isnan(adata_clone.obsm["clone_vector_emb"]) + ] = 0 + + return adata_clone + + +@beartype +def generate_all_clone_trajectories( + output_dir: Path, + mono_path: Optional[str] = None, + neu_path: Optional[str] = None, + output_names: Dict[str, str] = None, +) -> Dict[str, Path]: + """Pre-compute and cache clone trajectories for different lineage datasets. 
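+
+    A minimal usage sketch (illustrative; `data/external` matches the CLI
+    default and is created if missing, and the LARRY datasets are downloaded
+    on first use):
+
+        >>> # xdoctest: +SKIP
+        >>> from pathlib import Path
+        >>> paths = generate_all_clone_trajectories(Path("data/external"))
+        >>> sorted(paths.keys())
+        ['mono', 'multilineage', 'neu']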
+ + Args: + output_dir: Directory to save generated trajectory files + mono_path: Optional custom path for mono dataset + neu_path: Optional custom path for neu dataset + output_names: Optional custom output filenames + + Returns: + Dictionary mapping dataset names to file paths + """ + output_dir.mkdir(parents=True, exist_ok=True) + + if output_names is None: + output_names = { + "mono": "larry_mono_clone_trajectory.h5ad", + "neu": "larry_neu_clone_trajectory.h5ad", + "multilineage": "larry_multilineage_clone_trajectory.h5ad", + } + + logger.info(f"Loading monocyte lineage data from {'custom path' if mono_path else 'default path'}") + mono_adata = larry_mono(mono_path) if mono_path else larry_mono() + mono_clone = generate_clone_trajectory(mono_adata) + mono_clone_path = output_dir / output_names["mono"] + logger.info(f"Writing monocyte clone trajectory to {mono_clone_path}") + mono_clone.write_h5ad(mono_clone_path) + + logger.info(f"Loading neutrophil lineage data from {'custom path' if neu_path else 'default path'}") + neu_adata = larry_neu(neu_path) if neu_path else larry_neu() + neu_clone = generate_clone_trajectory(neu_adata) + neu_clone_path = output_dir / output_names["neu"] + logger.info(f"Writing neutrophil clone trajectory to {neu_clone_path}") + neu_clone.write_h5ad(neu_clone_path) + + logger.info("Creating concatenated multilineage clone trajectory") + multi_clone = mono_clone.concatenate(neu_clone) + multi_clone_path = output_dir / output_names["multilineage"] + logger.info(f"Writing multilineage clone trajectory to {multi_clone_path}") + multi_clone.write_h5ad(multi_clone_path) + + logger.info("All clone trajectories generated successfully") + + return { + "mono": mono_clone_path, + "neu": neu_clone_path, + "multilineage": multi_clone_path + } + + +@click.group( + invoke_without_command=True, + context_settings={"help_option_names": ["-h", "--help"]}, +) +@click.pass_context +def cli(ctx): + """ + # clone_gen + _**clone_gen**_ generates pre-computed clone trajectory files for PyroVelocity. + + This tool downloads LARRY dataset samples and computes clone trajectories that + can be later used directly in the plot_lineage_fate_correlation function. + + Pass -h or --help to each command group listed below for detailed help. + """ + if ctx.invoked_subcommand is None: + click.echo(ctx.get_help()) + + +@cli.command("generate") +@click.option( + "-o", + "--output-dir", + "output_dir", + default="data/external", + help="Output directory for the generated trajectories.", + show_default=True, + type=click.Path(), +) +@click.option( + "--mono-path", + "mono_path", + default=None, + help="Optional custom path for larry_mono dataset.", + type=click.Path(exists=False), +) +@click.option( + "--neu-path", + "neu_path", + default=None, + help="Optional custom path for larry_neu dataset.", + type=click.Path(exists=False), +) +def generate_trajectories(output_dir, mono_path, neu_path): + """ + # clone_gen generate + + Generate pre-computed clone trajectories for the LARRY datasets. + + This command: + 1. Downloads the larry_mono and larry_neu datasets if needed + 2. Computes clone trajectories using get_clone_trajectory + 3. Creates a concatenated multilineage trajectory + 4. Saves all trajectories to h5ad files + + These pre-computed trajectories can then be used with plot_lineage_fate_correlation + to generate consistent visualizations without redundant computation. 
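+
+    ## example
+
+    An illustrative invocation from the repository root (assumes the script is
+    run directly; the output directory shown is the default):
+
+        python scripts/clone/clone_gen.py generate --output-dir data/external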
+ """ + output_dir_path = Path(output_dir) + result_paths = generate_all_clone_trajectories( + output_dir=output_dir_path, + mono_path=mono_path, + neu_path=neu_path, + ) + + logger.info("Clone trajectories generated and saved to:") + for name, path in result_paths.items(): + logger.info(f" - {name}: {path}") + + logger.info("\nYou can now create functions in pyrovelocity.io.datasets to load these files:") + logger.info(""" +@beartype +def larry_mono_clone_trajectory( + file_path: str | Path = "data/external/larry_mono_clone_trajectory.h5ad", +) -> anndata._core.anndata.AnnData: + \"\"\" + Pre-computed clone trajectory data for the LARRY monocyte lineage. + + This contains the output of get_clone_trajectory applied to the larry_mono dataset. + + Returns: + AnnData object with clone trajectory information + \"\"\" + url = "https://storage.googleapis.com/pyrovelocity/data/larry_mono_clone_trajectory.h5ad" + adata = sc.read(file_path, backup_url=url, sparse=True, cache=True) + return adata + """) + + +@cli.command("examine") +@click.argument( + "trajectory_path", + type=click.Path(exists=True), +) +def examine_trajectory(trajectory_path): + """ + # clone_gen examine + + Examine a generated clone trajectory file and print information about its contents. + + ## arguments + - `TRAJECTORY_PATH`: Path to the clone trajectory file to examine + """ + import scanpy as sc + + try: + adata = sc.read(trajectory_path) + logger.info(f"Successfully loaded file: {trajectory_path}") + logger.info(f"AnnData object with n_obs × n_vars = {adata.n_obs} × {adata.n_vars}") + + if "state_info" in adata.obs: + centroid_count = sum(adata.obs["state_info"] == "Centroid") + logger.info(f"Contains {centroid_count} centroid cells") + + if "clone_vector_emb" in adata.obsm: + logger.info("Contains clone_vector_emb in obsm") + nan_count = np.isnan(adata.obsm["clone_vector_emb"]).sum() + if nan_count > 0: + logger.warning(f"Contains {nan_count} NaN values in clone_vector_emb") + else: + logger.info("No NaN values found in clone_vector_emb") + else: + logger.error("Missing clone_vector_emb in obsm") + + logger.info("\nAvailable keys:") + logger.info(f" obs keys: {list(adata.obs.keys())}") + logger.info(f" var keys: {list(adata.var.keys())}") + logger.info(f" obsm keys: {list(adata.obsm.keys())}") + + except Exception as e: + logger.error(f"Error examining trajectory file: {e}") + + +if __name__ == "__main__": + cli() From e2b4e5c98ffc4e96fb55b20f138711ddf7c5f84c Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Mon, 24 Mar 2025 23:03:11 -0400 Subject: [PATCH 02/32] fix(datasets): add clone trajectory data sets Signed-off-by: Cameron Smith --- src/pyrovelocity/io/datasets.py | 74 +++++++++++++++++++++++++++++++++ 1 file changed, 74 insertions(+) diff --git a/src/pyrovelocity/io/datasets.py b/src/pyrovelocity/io/datasets.py index a42a176eb..20d49a50e 100644 --- a/src/pyrovelocity/io/datasets.py +++ b/src/pyrovelocity/io/datasets.py @@ -24,6 +24,9 @@ "pancreas", "bonemarrow", "pbmc68k", + "larry_mono_clone_trajectory", + "larry_neu_clone_trajectory", + "larry_multilineage_clone_trajectory", ] @@ -301,6 +304,77 @@ def larry_multilineage( return adata +@beartype +def larry_mono_clone_trajectory( + file_path: str | Path = "data/external/larry_mono_clone_trajectory.h5ad", +) -> anndata._core.anndata.AnnData: + """ + Pre-computed clone trajectory data for the LARRY monocyte lineage. + + This contains the output of get_clone_trajectory applied to the larry_mono dataset. 
+ The clone trajectory information is used for visualizing clonal progression + and calculating trajectory alignment with velocity predictions. + + Returns: + AnnData object with clone trajectory information + """ + url = "https://storage.googleapis.com/pyrovelocity/data/larry_mono_clone_trajectory.h5ad" + adata = sc.read(file_path, backup_url=url, sparse=True, cache=True) + expected_hash = ( + "f5d0dcb9baa63460c5be5a1ebdab6a97c6f3ec0b5641ab1b770d16fb96bd9fc9" + ) + _check_hash(file_path, expected_hash) + return adata + + +@beartype +def larry_neu_clone_trajectory( + file_path: str | Path = "data/external/larry_neu_clone_trajectory.h5ad", +) -> anndata._core.anndata.AnnData: + """ + Pre-computed clone trajectory data for the LARRY neutrophil lineage. + + This contains the output of get_clone_trajectory applied to the larry_neu dataset. + The clone trajectory information is used for visualizing clonal progression + and calculating trajectory alignment with velocity predictions. + + Returns: + AnnData object with clone trajectory information + """ + url = "https://storage.googleapis.com/pyrovelocity/data/larry_neu_clone_trajectory.h5ad" + adata = sc.read(file_path, backup_url=url, sparse=True, cache=True) + expected_hash = ( + "6e7dbc273c59e28f1962df31452d5eea00336089c36a44f55fcfc91f6f428396" + ) + _check_hash(file_path, expected_hash) + return adata + + +@beartype +def larry_multilineage_clone_trajectory( + file_path: str + | Path = "data/external/larry_multilineage_clone_trajectory.h5ad", +) -> anndata._core.anndata.AnnData: + """ + Pre-computed clone trajectory data for the LARRY multilineage dataset. + + This contains the concatenated output of get_clone_trajectory applied to + both larry_mono and larry_neu datasets. Using this pre-computed trajectory + ensures consistent fate analysis across both lineages without recomputing + trajectories separately. 
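+
+    Example (illustrative; assumes network access, since the file is fetched
+    to the default path on first call):
+
+        >>> # xdoctest: +SKIP
+        >>> adata = larry_multilineage_clone_trajectory()
+        >>> "clone_vector_emb" in adata.obsm
+        True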
+ + Returns: + AnnData object with clone trajectory information + """ + url = "https://storage.googleapis.com/pyrovelocity/data/larry_multilineage_clone_trajectory.h5ad" + adata = sc.read(file_path, backup_url=url, sparse=True, cache=True) + expected_hash = ( + "ffedda0332c411ca10c09562e5c8a50643af9120f65b0b3701bf30a8d5fdc97b" + ) + _check_hash(file_path, expected_hash) + return adata + + @beartype def pancreas( file_path: str | Path = "data/external/pancreas.h5ad", From 695a98c9e42159987ae2f829f169c9b65737a596 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Wed, 26 Mar 2025 13:35:14 -0400 Subject: [PATCH 03/32] fix(time_fate_correlation): refactor create_time_lineage_fate_correlation_plot for workflow usage Signed-off-by: Cameron Smith --- .../tasks/time_fate_correlation.py | 90 ++++++++++++++++++- 1 file changed, 89 insertions(+), 1 deletion(-) diff --git a/src/pyrovelocity/tasks/time_fate_correlation.py b/src/pyrovelocity/tasks/time_fate_correlation.py index 20b4cd2cd..d875515e8 100644 --- a/src/pyrovelocity/tasks/time_fate_correlation.py +++ b/src/pyrovelocity/tasks/time_fate_correlation.py @@ -3,7 +3,7 @@ import matplotlib import matplotlib.pyplot as plt from beartype import beartype -from beartype.typing import List, Tuple +from beartype.typing import List, Union from matplotlib.axes import Axes from matplotlib.figure import Figure from matplotlib.gridspec import GridSpec @@ -25,6 +25,7 @@ __all__ = [ "configure_time_lineage_fate_plot", + "create_time_lineage_fate_correlation_plot", ] logger = configure_logging(__name__) @@ -112,6 +113,93 @@ def estimate_time_lineage_fate_correlation( ) +@beartype +def create_time_lineage_fate_correlation_plot( + model_results: List[dict], + vertical_texts: List[str] = [ + "Monocytes", + "Neutrophils", + "Multilineage", + "All lineages", + ], + reports_path: Union[str, Path] = ".", +) -> Path: + """ + Create a time lineage fate correlation plot from model results. + + This function is designed to be called from Flyte workflow or standalone Python code, + processing model outputs to create lineage fate correlation visualizations. 
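+
+    A minimal call sketch (the dictionary keys mirror the Args below; the data
+    model name and file paths are hypothetical placeholders):
+
+        >>> # xdoctest: +SKIP
+        >>> create_time_lineage_fate_correlation_plot(
+        ...     model_results=[
+        ...         {
+        ...             "data_model": "larry_mono_model2",
+        ...             "postprocessed_data": "postprocessed.h5ad",
+        ...             "pyrovelocity_data": "pyrovelocity.pkl.zst",
+        ...         }
+        ...     ],
+        ...     vertical_texts=["Monocytes"],
+        ...     reports_path="reports",
+        ... )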
+ + Args: + model_results: List of dictionaries containing model outputs with the following keys: + - data_model: String identifier for the data model + - postprocessed_data: Path to the postprocessed AnnData file + - pyrovelocity_data: Path to the posterior samples file + vertical_texts: Labels for each row in the plot + reports_path: Directory to save the plot + + Returns: + Path: The path where the final plot is saved + """ + n_rows = len(model_results) + n_cols = 7 + width = 14 + height = width * (n_rows / n_cols) + 1 + + fig = plt.figure(figsize=(width, height)) + + gs = fig.add_gridspec( + n_rows + 1, + n_cols + 1, + width_ratios=[0.02] + [1] * n_cols, + height_ratios=[1] * n_rows + [0.2], + ) + + adata_cospar = larry_cospar() + + all_axes = [] + data_set_model_pairing = None + + for i, model_output in enumerate(model_results): + data_set_model_pairing = model_output["data_model"] + + postprocessed_data_path = model_output["postprocessed_data"] + posterior_samples_path = model_output["pyrovelocity_data"] + + plot_path = Path(f"time_fate_correlation_{data_set_model_pairing}.pdf") + + axes = [fig.add_subplot(gs[i, j + 1]) for j in range(n_cols)] + all_axes.append(axes) + + plot_lineage_fate_correlation( + posterior_samples_path=posterior_samples_path, + adata_pyrovelocity=postprocessed_data_path, + adata_cospar=adata_cospar, + all_axes=axes, + fig=fig, + state_color_dict=LARRY_CELL_TYPE_COLORS, + lineage_fate_correlation_path=plot_path, + save_plot=False, + ylabel="", + show_titles=True if i == 0 else False, + show_colorbars=False, + default_fontsize=12 if matplotlib.rcParams["text.usetex"] else 9, + ) + + row_labels = ["a", "b", "c", "d"][:n_rows] + vertical_texts = vertical_texts[:n_rows] + + return configure_time_lineage_fate_plot( + fig=fig, + gs=gs, + all_axes=all_axes, + row_labels=row_labels, + vertical_texts=vertical_texts, + reports_path=Path(reports_path), + model_identifier=data_set_model_pairing or "model", + ) + + @beartype def configure_time_lineage_fate_plot( fig: Figure, From 07b0ee39c10c22ca3b8f02b182833e13eac0924c Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Wed, 26 Mar 2025 13:40:25 -0400 Subject: [PATCH 04/32] fix(workflows): use create_time_lineage_fate_correlation_plot task function Signed-off-by: Cameron Smith --- src/pyrovelocity/workflows/main_workflow.py | 86 +++++---------------- 1 file changed, 19 insertions(+), 67 deletions(-) diff --git a/src/pyrovelocity/workflows/main_workflow.py b/src/pyrovelocity/workflows/main_workflow.py index aaba85e52..3d62acff8 100644 --- a/src/pyrovelocity/workflows/main_workflow.py +++ b/src/pyrovelocity/workflows/main_workflow.py @@ -3,13 +3,11 @@ from datetime import timedelta from pathlib import Path -import matplotlib from beartype.typing import List from flytekit import Resources, current_context, dynamic, task from flytekit.extras.accelerators import T4, GPUAccelerator from flytekit.types.directory import FlyteDirectory from flytekit.types.file import FlyteFile -from matplotlib import pyplot as plt from returns.result import Failure, Success from pyrovelocity.interfaces import ( @@ -21,7 +19,6 @@ copy_files_to_directory, create_tarball_from_filtered_dir, ) -from pyrovelocity.io.datasets import larry_cospar from pyrovelocity.io.gcs import upload_file_concurrently from pyrovelocity.io.metrics import ( add_duration_to_run_info, @@ -30,16 +27,12 @@ load_json, ) from pyrovelocity.logging import configure_logging -from pyrovelocity.plots import ( - plot_lineage_fate_correlation, -) -from pyrovelocity.styles.colors import 
LARRY_CELL_TYPE_COLORS from pyrovelocity.tasks.data import download_dataset from pyrovelocity.tasks.postprocess import postprocess_dataset from pyrovelocity.tasks.preprocess import preprocess_dataset from pyrovelocity.tasks.summarize import summarize_dataset from pyrovelocity.tasks.time_fate_correlation import ( - configure_time_lineage_fate_plot, + create_time_lineage_fate_correlation_plot, ) from pyrovelocity.tasks.train import train_dataset from pyrovelocity.workflows.constants import ( @@ -513,72 +506,31 @@ def combine_time_lineage_fate_correlation( for model_results in model_ordered_results: print(model_results) - n_rows = len(model_results) - n_cols = 7 - width = 14 - height = width * (n_rows / n_cols) + 1 - - fig = plt.figure(figsize=(width, height)) - - gs = fig.add_gridspec( - n_rows + 1, - n_cols + 1, - width_ratios=[0.02] + [1] * n_cols, - height_ratios=[1] * n_rows + [0.2], - ) - - adata_cospar = larry_cospar() - - all_axes = [] - for i, model_output in enumerate(model_results): - data_set_model_pairing = model_output.data_model + prepared_model_results = [] + for model_output in model_results: postprocessed_data_path = model_output.postprocessed_data.download() - posterior_samples_path = model_output.pyrovelocity_data.download() - plot_path = Path( - f"time_fate_correlation_{data_set_model_pairing}.pdf" + prepared_model_results.append( + { + "data_model": model_output.data_model, + "postprocessed_data": postprocessed_data_path, + "pyrovelocity_data": posterior_samples_path, + } ) - axes = [fig.add_subplot(gs[i, j + 1]) for j in range(n_cols)] - all_axes.append(axes) - - plot_lineage_fate_correlation( - posterior_samples_path=posterior_samples_path, - adata_pyrovelocity=postprocessed_data_path, - adata_cospar=adata_cospar, - all_axes=axes, - fig=fig, - state_color_dict=LARRY_CELL_TYPE_COLORS, - lineage_fate_correlation_path=plot_path, - save_plot=False, - ylabel="", - show_titles=True if i == 0 else False, - show_colorbars=False, - default_fontsize=12 - if matplotlib.rcParams["text.usetex"] - else 9, + time_lineage_fate_correlation_plot = ( + create_time_lineage_fate_correlation_plot( + model_results=prepared_model_results, + vertical_texts=[ + "Monocytes", + "Neutrophils", + "Multilineage", + "All lineages", + ][: len(prepared_model_results)], + reports_path=Path("."), ) - - time_lineage_fate_correlation_plot = configure_time_lineage_fate_plot( - fig=fig, - gs=gs, - all_axes=all_axes, - row_labels=[ - "a", - "b", - "c", - "d", - ][:n_rows], - vertical_texts=[ - "Monocytes", - "Neutrophils", - "Multilineage", - "All lineages", - ][:n_rows], - reports_path=Path("."), - model_identifier=data_set_model_pairing, ) time_lineage_fate_correlation_plots.append( From 18e70f9244cfd84c4a9a355d522aab9381750096 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Wed, 26 Mar 2025 13:42:14 -0400 Subject: [PATCH 05/32] chore(workflows): revert cache to `2024.8.15` Signed-off-by: Cameron Smith --- src/pyrovelocity/workflows/main_workflow.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/pyrovelocity/workflows/main_workflow.py b/src/pyrovelocity/workflows/main_workflow.py index 3d62acff8..45c8956af 100644 --- a/src/pyrovelocity/workflows/main_workflow.py +++ b/src/pyrovelocity/workflows/main_workflow.py @@ -79,15 +79,15 @@ logger = configure_logging(__name__) -CACHE_VERSION = "2025.3.2" +CACHE_VERSION = "2024.8.15" DOWNLOAD_CACHE_VERSION = f"{CACHE_VERSION}.0" PREPROCESS_CACHE_VERSION = f"{CACHE_VERSION}.0" TRAIN_CACHE_VERSION = f"{CACHE_VERSION}.0" 
-POSTPROCESS_CACHE_VERSION = f"{CACHE_VERSION}.0" -SUMMARIZE_CACHE_VERSION = f"{CACHE_VERSION}.0" -UPLOAD_CACHE_VERSION = f"{CACHE_VERSION}.0" -LINEAGE_FATE_CORRELATION_CACHE_VERSION = f"{CACHE_VERSION}.0" -COMBINE_METRICS_CACHE_VERSION = f"{CACHE_VERSION}.0" +POSTPROCESS_CACHE_VERSION = f"{CACHE_VERSION}.2" +SUMMARIZE_CACHE_VERSION = f"{CACHE_VERSION}.3" +UPLOAD_CACHE_VERSION = f"{CACHE_VERSION}.7" +LINEAGE_FATE_CORRELATION_CACHE_VERSION = f"{CACHE_VERSION}.5" +COMBINE_METRICS_CACHE_VERSION = f"{CACHE_VERSION}.5" DEFAULT_ACCELERATOR_TYPE: GPUAccelerator = T4 From e7f219605347bdf3777fb45b425c2ea42dc5ab84 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Wed, 26 Mar 2025 13:43:27 -0400 Subject: [PATCH 06/32] chore(workflows): bump lineage fate correlation cache to `2024.8.15.6` Signed-off-by: Cameron Smith --- src/pyrovelocity/workflows/main_workflow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyrovelocity/workflows/main_workflow.py b/src/pyrovelocity/workflows/main_workflow.py index 45c8956af..94edc7674 100644 --- a/src/pyrovelocity/workflows/main_workflow.py +++ b/src/pyrovelocity/workflows/main_workflow.py @@ -86,7 +86,7 @@ POSTPROCESS_CACHE_VERSION = f"{CACHE_VERSION}.2" SUMMARIZE_CACHE_VERSION = f"{CACHE_VERSION}.3" UPLOAD_CACHE_VERSION = f"{CACHE_VERSION}.7" -LINEAGE_FATE_CORRELATION_CACHE_VERSION = f"{CACHE_VERSION}.5" +LINEAGE_FATE_CORRELATION_CACHE_VERSION = f"{CACHE_VERSION}.6" COMBINE_METRICS_CACHE_VERSION = f"{CACHE_VERSION}.5" DEFAULT_ACCELERATOR_TYPE: GPUAccelerator = T4 From ef3518b024ba200d64a942397fe243236736481b Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Wed, 26 Mar 2025 13:47:11 -0400 Subject: [PATCH 07/32] fix(constants): disable demo flag Signed-off-by: Cameron Smith --- src/pyrovelocity/workflows/constants.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyrovelocity/workflows/constants.py b/src/pyrovelocity/workflows/constants.py index 53e98992e..86f1e3993 100644 --- a/src/pyrovelocity/workflows/constants.py +++ b/src/pyrovelocity/workflows/constants.py @@ -19,7 +19,7 @@ # # Defaults to False if not set. 
PYROVELOCITY_DEMO_FLAG = str_to_bool( - os.getenv("PYROVELOCITY_DEMO_FLAG", "True") + os.getenv("PYROVELOCITY_DEMO_FLAG", "False") ) if PYROVELOCITY_DEMO_FLAG: From 776ed2c5743c179d02ebc4a89a87cfdfeb2c19ef Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Wed, 26 Mar 2025 13:55:00 -0400 Subject: [PATCH 08/32] fix(main_workflow): reenable all data sets Signed-off-by: Cameron Smith --- src/pyrovelocity/workflows/main_workflow.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/pyrovelocity/workflows/main_workflow.py b/src/pyrovelocity/workflows/main_workflow.py index 94edc7674..b77dc5d70 100644 --- a/src/pyrovelocity/workflows/main_workflow.py +++ b/src/pyrovelocity/workflows/main_workflow.py @@ -682,9 +682,9 @@ def training_workflow( ] developmental_configurations = [ - # (bonemarrow_configuration, "bonemarrow"), + (bonemarrow_configuration, "bonemarrow"), (pancreas_configuration, "pancreas"), - # (pons_configuration, "pons"), + (pons_configuration, "pons"), ] lineage_traced_results = [] @@ -700,9 +700,9 @@ def training_workflow( ] if not PYROVELOCITY_DATA_SUBSET: - # configurations += stationary_configurations + configurations += stationary_configurations configurations += developmental_configurations - # configurations += lineage_traced_configurations + configurations += lineage_traced_configurations for config, data_set_name in configurations: result = map_model_configurations_over_data_set( From d194feaee48cfbdb839c351ecb78379b5581a24c Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Fri, 28 Mar 2025 16:26:41 -0400 Subject: [PATCH 09/32] fix(lineage_fate_correlation): absorb nan cleanup from plotting function to get_clone_trajectory Signed-off-by: Cameron Smith --- .../plots/_lineage_fate_correlation.py | 35 ++++++++++++------- src/pyrovelocity/plots/_trajectory.py | 24 +++++++++++++ 2 files changed, 47 insertions(+), 12 deletions(-) diff --git a/src/pyrovelocity/plots/_lineage_fate_correlation.py b/src/pyrovelocity/plots/_lineage_fate_correlation.py index 2a2e646a6..5c5cb7947 100644 --- a/src/pyrovelocity/plots/_lineage_fate_correlation.py +++ b/src/pyrovelocity/plots/_lineage_fate_correlation.py @@ -7,7 +7,7 @@ import seaborn as sns from anndata import AnnData from beartype import beartype -from beartype.typing import Dict, List, Tuple +from beartype.typing import Dict, List, Optional, Tuple from matplotlib.axes import Axes from matplotlib.figure import Figure from scipy.spatial import distance @@ -18,7 +18,6 @@ from pyrovelocity.plots._time import plot_posterior_time from pyrovelocity.plots._trajectory import ( align_trajectory_diff, - get_clone_trajectory, ) from pyrovelocity.plots._uncertainty import ( get_posterior_sample_angle_uncertainty, @@ -39,6 +38,7 @@ def plot_lineage_fate_correlation( all_axes: List[Axes] | np.ndarray, fig: Figure, state_color_dict: Dict, + adata_input_clone: str | Path | AnnData, ylabel: str = "Monocyte lineage", dotsize: int = 3, scale: float = 0.35, @@ -55,17 +55,29 @@ def plot_lineage_fate_correlation( Plot lineage fate correlation with shared latent time estimates. Args: - posterior_samples_path (str | Path): Path to the posterior samples. - adata_pyrovelocity (str | Path): Path to the Pyro-Velocity AnnData object. - adata_scvelo (str | Path): Path to the scVelo AnnData object. - adata_cospar (AnnData): AnnData object with COSPAR results. - ax (Axes): Matplotlib axes. + posterior_samples_path (str | Path | AnnData): Path to the posterior samples. 
+ adata_pyrovelocity (str | Path | AnnData): Path to the Pyro-Velocity AnnData object. + adata_cospar (str | Path | AnnData): AnnData object with COSPAR results. + all_axes (List[Axes] | np.ndarray): List of matplotlib axes. fig (Figure): Matplotlib figure. state_color_dict (Dict): Dictionary with cell state colors. - ylabel (str, optional): Label for y axis. Defaults to "Unipotent Monocyte lineage". + adata_input_clone (str | Path | AnnData): Pre-computed clone trajectory data. + ylabel (str, optional): Label for y axis. Defaults to "Monocyte lineage". dotsize (int, optional): Size of plotted points. Defaults to 3. scale (float, optional): Plot scale. Defaults to 0.35. arrow (float, optional): Arrow size. Defaults to 3.5. + lineage_fate_correlation_path (str | Path, optional): Path to save the plot. + Defaults to "lineage_fate_correlation.pdf". + save_plot (bool, optional): Whether to save the plot. Defaults to True. + show_colorbars (bool, optional): Whether to show colorbars. Defaults to False. + show_titles (bool, optional): Whether to show titles. Defaults to False. + default_fontsize (int, optional): Default font size. Defaults to 7. + default_title_padding (int, optional): Default title padding. Defaults to 2. + include_uncertainty_measures (bool, optional): Whether to include uncertainty + measures. Defaults to False. + + Returns: + List[Axes] | np.ndarray: The axes objects. Examples: >>> # xdoctest: +SKIP @@ -111,12 +123,11 @@ def plot_lineage_fate_correlation( adata_pyrovelocity = load_anndata_from_path(adata_pyrovelocity) if isinstance(adata_cospar, str | Path): adata_cospar = load_anndata_from_path(adata_cospar) + if isinstance(adata_input_clone, str | Path): + adata_input_clone = load_anndata_from_path(adata_input_clone) adata_scvelo = adata_pyrovelocity.copy() - adata_input_clone = get_clone_trajectory(adata_scvelo) - adata_input_clone.obsm["clone_vector_emb"][ - np.isnan(adata_input_clone.obsm["clone_vector_emb"]) - ] = 0 + density = 0.35 diff = align_trajectory_diff( [adata_input_clone, adata_scvelo, adata_scvelo], diff --git a/src/pyrovelocity/plots/_trajectory.py b/src/pyrovelocity/plots/_trajectory.py index 7be71cfec..6769bd518 100644 --- a/src/pyrovelocity/plots/_trajectory.py +++ b/src/pyrovelocity/plots/_trajectory.py @@ -11,7 +11,23 @@ def get_clone_trajectory( average_start_point: bool = True, times: List[int] = [2, 4, 6], clone_num: Optional[int] = None, + fix_nans: bool = True, ) -> AnnData: + """ + Generate clone trajectory data from AnnData object. 
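+
+    A short usage sketch (illustrative; assumes a LARRY-style AnnData with
+    clone annotations, e.g. the larry_mono dataset):
+
+        >>> # xdoctest: +SKIP
+        >>> from pyrovelocity.io.datasets import larry_mono
+        >>> adata_clone = get_clone_trajectory(larry_mono())
+        >>> "clone_vector_emb" in adata_clone.obsm
+        True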
+ + Args: + adata: The input AnnData object with clone information + average_start_point: Whether to average the start point for visualization + times: List of time points to consider for trajectory construction + clone_num: Maximum number of clones to process, uses all if None + fix_nans: Whether to replace NaN values with zeros in clone_vector_emb + + Returns: + An AnnData object with clone trajectory information including: + - Centroid cells representing average positions at each time point + - clone_vector_emb in .obsm containing trajectory vectors + """ adata = adata.copy() if not average_start_point: adata.obsm["clone_vector_emb"] = np.zeros((adata.shape[0], 2)) @@ -90,6 +106,14 @@ def get_clone_trajectory( adata_new = adata.concatenate( centroids[0].concatenate(centroids[1:]), join="outer" ) + + if fix_nans and "clone_vector_emb" in adata_new.obsm: + nan_count = np.isnan(adata_new.obsm["clone_vector_emb"]).sum() + if nan_count > 0: + adata_new.obsm["clone_vector_emb"][ + np.isnan(adata_new.obsm["clone_vector_emb"]) + ] = 0 + return adata_new From 4f557a9107f10e1c374c543889b3cca2b115ad9c Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Fri, 28 Mar 2025 16:30:40 -0400 Subject: [PATCH 10/32] fix(tasks): compute clone trajectories in time lineage fate correlation task - removed from plotting function Signed-off-by: Cameron Smith --- .../tasks/time_fate_correlation.py | 73 ++++++++++++++++++- 1 file changed, 72 insertions(+), 1 deletion(-) diff --git a/src/pyrovelocity/tasks/time_fate_correlation.py b/src/pyrovelocity/tasks/time_fate_correlation.py index d875515e8..531f654de 100644 --- a/src/pyrovelocity/tasks/time_fate_correlation.py +++ b/src/pyrovelocity/tasks/time_fate_correlation.py @@ -9,9 +9,12 @@ from matplotlib.gridspec import GridSpec from matplotlib.ticker import MaxNLocator -from pyrovelocity.io.datasets import larry_cospar +from pyrovelocity.io.datasets import ( + larry_cospar, +) from pyrovelocity.logging import configure_logging from pyrovelocity.plots import plot_lineage_fate_correlation +from pyrovelocity.plots._trajectory import get_clone_trajectory from pyrovelocity.styles import configure_matplotlib_style from pyrovelocity.styles.colors import LARRY_CELL_TYPE_COLORS from pyrovelocity.utils import load_anndata_from_path @@ -157,11 +160,75 @@ def create_time_lineage_fate_correlation_plot( adata_cospar = larry_cospar() + logger.info("Generating clone trajectories for all datasets") + clone_trajectories = {} + + for model_output in model_results: + data_set_model_pairing = model_output["data_model"] + dataset_name = data_set_model_pairing.split("_model")[0] + + if dataset_name in clone_trajectories: + continue + + postprocessed_data_path = model_output["postprocessed_data"] + + logger.info(f"Loading data for {dataset_name}") + adata_pyrovelocity = load_anndata_from_path(postprocessed_data_path) + + if dataset_name == "larry_multilineage": + logger.info( + "Creating multilineage clone trajectory from mono and neu subsets" + ) + + if "state_info" in adata_pyrovelocity.obs: + mono_mask = adata_pyrovelocity.obs["state_info"].str.contains( + "Mono", case=False, na=False + ) + neu_mask = adata_pyrovelocity.obs["state_info"].str.contains( + "Neu", case=False, na=False + ) + + mono_adata = adata_pyrovelocity[mono_mask].copy() + neu_adata = adata_pyrovelocity[neu_mask].copy() + + logger.info( + f" - Generating mono trajectory with {mono_adata.n_obs} cells" + ) + mono_clone = get_clone_trajectory(mono_adata) + + logger.info( + f" - Generating neu trajectory with {neu_adata.n_obs} 
cells" + ) + neu_clone = get_clone_trajectory(neu_adata) + + logger.info(" - Concatenating mono and neu trajectories") + clone_trajectories[dataset_name] = mono_clone.concatenate( + neu_clone + ) + else: + logger.warning( + "Could not identify mono/neu cells in multilineage dataset. Generating unified trajectory." + ) + clone_trajectories[dataset_name] = get_clone_trajectory( + adata_pyrovelocity + ) + else: + logger.info( + f"Generating clone trajectory for {dataset_name} with {adata_pyrovelocity.n_obs} cells" + ) + clone_trajectories[dataset_name] = get_clone_trajectory( + adata_pyrovelocity + ) + + logger.info(f"Completed trajectory generation for {dataset_name}") + + logger.info("Creating plots using generated trajectories") all_axes = [] data_set_model_pairing = None for i, model_output in enumerate(model_results): data_set_model_pairing = model_output["data_model"] + dataset_name = data_set_model_pairing.split("_model")[0] postprocessed_data_path = model_output["postprocessed_data"] posterior_samples_path = model_output["pyrovelocity_data"] @@ -171,6 +238,9 @@ def create_time_lineage_fate_correlation_plot( axes = [fig.add_subplot(gs[i, j + 1]) for j in range(n_cols)] all_axes.append(axes) + adata_input_clone = clone_trajectories[dataset_name] + logger.info(f"Using cached clone trajectory for {dataset_name}") + plot_lineage_fate_correlation( posterior_samples_path=posterior_samples_path, adata_pyrovelocity=postprocessed_data_path, @@ -178,6 +248,7 @@ def create_time_lineage_fate_correlation_plot( all_axes=axes, fig=fig, state_color_dict=LARRY_CELL_TYPE_COLORS, + adata_input_clone=adata_input_clone, lineage_fate_correlation_path=plot_path, save_plot=False, ylabel="", From 54178c30cb36f57778d5cda5120450991e4257a7 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Fri, 28 Mar 2025 16:38:43 -0400 Subject: [PATCH 11/32] refactor(_trajectory): migrate analysis code from plots to analysis Signed-off-by: Cameron Smith --- src/pyrovelocity/{plots/_trajectory.py => analysis/trajectory.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/pyrovelocity/{plots/_trajectory.py => analysis/trajectory.py} (100%) diff --git a/src/pyrovelocity/plots/_trajectory.py b/src/pyrovelocity/analysis/trajectory.py similarity index 100% rename from src/pyrovelocity/plots/_trajectory.py rename to src/pyrovelocity/analysis/trajectory.py From 71174d2d0b5009da5fcd57af19cd469c5c90cd91 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Fri, 28 Mar 2025 16:39:13 -0400 Subject: [PATCH 12/32] fix(trajectory): use input times and error on empty centroids Signed-off-by: Cameron Smith --- src/pyrovelocity/analysis/trajectory.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/pyrovelocity/analysis/trajectory.py b/src/pyrovelocity/analysis/trajectory.py index 6769bd518..7e266686f 100644 --- a/src/pyrovelocity/analysis/trajectory.py +++ b/src/pyrovelocity/analysis/trajectory.py @@ -77,9 +77,7 @@ def get_clone_trajectory( adata.obs.columns.get_loc("clones"), ] = int(j) adata_new.obs.loc[:, "time"] = [ - t - for t, time in zip([2, 4, 6], times_index) - if time.shape[0] > 0 + t for t, time in zip(times, times_index) if time.shape[0] > 0 ] adata_new.obs.loc[:, "clones"] = int(j) adata_new.obs.loc[:, "state_info"] = "Centroid" @@ -103,6 +101,9 @@ def get_clone_trajectory( centroids.append(adata_new) + if not centroids: + raise ValueError("No valid clone trajectories found in the data") + adata_new = adata.concatenate( centroids[0].concatenate(centroids[1:]), join="outer" ) From 
16d382ea3b6b1f13e69e190d04adfcb41d54d561 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Fri, 28 Mar 2025 16:40:13 -0400 Subject: [PATCH 13/32] fix(analysis): include trajectory functions Signed-off-by: Cameron Smith --- src/pyrovelocity/analysis/__init__.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/pyrovelocity/analysis/__init__.py b/src/pyrovelocity/analysis/__init__.py index e8ae0076d..8e9e0b76d 100644 --- a/src/pyrovelocity/analysis/__init__.py +++ b/src/pyrovelocity/analysis/__init__.py @@ -1,10 +1,15 @@ import pyrovelocity.analysis.analyze import pyrovelocity.analysis.cytotrace import pyrovelocity.analysis.transcriptome_properties - +from pyrovelocity.analysis.trajectory import ( + align_trajectory_diff, + get_clone_trajectory, +) __all__ = [ "analyze", "cytotrace", "transcriptome_properties", + "align_trajectory_diff", + "get_clone_trajectory", ] From 38809fc1a3129b521a69e8e560f669b442159948 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Fri, 28 Mar 2025 16:40:31 -0400 Subject: [PATCH 14/32] fix(plots): use trajectory functions from analysis package Signed-off-by: Cameron Smith --- .../plots/_lineage_fate_correlation.py | 16 +++------------- 1 file changed, 3 insertions(+), 13 deletions(-) diff --git a/src/pyrovelocity/plots/_lineage_fate_correlation.py b/src/pyrovelocity/plots/_lineage_fate_correlation.py index 5c5cb7947..3c13c0bb5 100644 --- a/src/pyrovelocity/plots/_lineage_fate_correlation.py +++ b/src/pyrovelocity/plots/_lineage_fate_correlation.py @@ -13,12 +13,10 @@ from scipy.spatial import distance from scipy.stats import spearmanr +from pyrovelocity.analysis.trajectory import align_trajectory_diff from pyrovelocity.io.compressedpickle import CompressedPickle from pyrovelocity.logging import configure_logging from pyrovelocity.plots._time import plot_posterior_time -from pyrovelocity.plots._trajectory import ( - align_trajectory_diff, -) from pyrovelocity.plots._uncertainty import ( get_posterior_sample_angle_uncertainty, ) @@ -240,15 +238,14 @@ def plot_lineage_fate_correlation( ) ax.axis("off") if show_titles: + # "scVelo cosine similarity: %.2f" % scvelo_cos_mean, fontsize=default_fontsize ax.set_title( - # "scVelo cosine similarity: %.2f" % scvelo_cos_mean, fontsize=default_fontsize f"scVelo ({scvelo_cos_mean:.2f})", fontsize=default_fontsize, pad=default_title_padding, ) else: ax.set_title( - # f"" f"({scvelo_cos_mean:.2f})", fontsize=default_fontsize, pad=default_title_padding, @@ -275,8 +272,8 @@ def plot_lineage_fate_correlation( ) ax.axis("off") if show_titles: + # "Pyro-Velocity cosine similarity: %.2f" % pyro_cos_mean, fontsize=default_fontsize ax.set_title( - # "Pyro-Velocity cosine similarity: %.2f" % pyro_cos_mean, fontsize=default_fontsize rf"Pyro\thinspace-Velocity ({pyro_cos_mean:.2f})" if matplotlib.rcParams["text.usetex"] else f"Pyro\u2009-Velocity ({pyro_cos_mean:.2f})", @@ -285,7 +282,6 @@ def plot_lineage_fate_correlation( ) else: ax.set_title( - # f"" f"({pyro_cos_mean:.2f})", fontsize=default_fontsize, pad=default_title_padding, @@ -390,15 +386,12 @@ def plot_lineage_fate_correlation( ax.axis("off") if show_titles: ax.set_title( - # f"scVelo latent time\ncorrelation: {scvelo_latent_time_correlation:.2f}" - # f"scVelo latent time ({scvelo_latent_time_correlation:.2f})", f"scVelo time ({scvelo_latent_time_correlation:.2f})", fontsize=default_fontsize, pad=default_title_padding, ) else: ax.set_title( - # f"scVelo latent time\ncorrelation: {scvelo_latent_time_correlation:.2f}" 
f"({scvelo_latent_time_correlation:.2f})", fontsize=default_fontsize, pad=default_title_padding, @@ -425,8 +418,6 @@ def plot_lineage_fate_correlation( ) if show_titles: ax.set_title( - # f"Pyro-Velocity shared time\ncorrelation: {pyrovelocity_shared_time_correlation:.2f}" - # f"Pyro-Velocity shared time ({pyrovelocity_shared_time_correlation:.2f})", rf"Pyro\thinspace-Velocity time ({pyrovelocity_shared_time_correlation:.2f})" if matplotlib.rcParams["text.usetex"] else f"Pyro\u2009-Velocity time ({pyrovelocity_shared_time_correlation:.2f})", @@ -435,7 +426,6 @@ def plot_lineage_fate_correlation( ) else: ax.set_title( - # f"" f"({pyrovelocity_shared_time_correlation:.2f})", fontsize=default_fontsize, pad=default_title_padding, From 6f507886e2c3412cbd3bfbd38ada799b39e68438 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Fri, 28 Mar 2025 16:40:47 -0400 Subject: [PATCH 15/32] fix(tasks): use trajectory functions from analysis package Signed-off-by: Cameron Smith --- src/pyrovelocity/tasks/time_fate_correlation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyrovelocity/tasks/time_fate_correlation.py b/src/pyrovelocity/tasks/time_fate_correlation.py index 531f654de..2937187d1 100644 --- a/src/pyrovelocity/tasks/time_fate_correlation.py +++ b/src/pyrovelocity/tasks/time_fate_correlation.py @@ -9,12 +9,12 @@ from matplotlib.gridspec import GridSpec from matplotlib.ticker import MaxNLocator +from pyrovelocity.analysis.trajectory import get_clone_trajectory from pyrovelocity.io.datasets import ( larry_cospar, ) from pyrovelocity.logging import configure_logging from pyrovelocity.plots import plot_lineage_fate_correlation -from pyrovelocity.plots._trajectory import get_clone_trajectory from pyrovelocity.styles import configure_matplotlib_style from pyrovelocity.styles.colors import LARRY_CELL_TYPE_COLORS from pyrovelocity.utils import load_anndata_from_path From 5c386927940bf9f161085e8539fee75f1a130432 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Fri, 28 Mar 2025 16:47:16 -0400 Subject: [PATCH 16/32] fix(scripts): use trajectory functions from analysis package Signed-off-by: Cameron Smith --- scripts/clone/clone_gen.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/clone/clone_gen.py b/scripts/clone/clone_gen.py index e4c0cba1d..6f1884876 100644 --- a/scripts/clone/clone_gen.py +++ b/scripts/clone/clone_gen.py @@ -13,7 +13,7 @@ from rich.theme import Theme from pyrovelocity.io.datasets import larry_mono, larry_neu -from pyrovelocity.plots._trajectory import get_clone_trajectory +from pyrovelocity.analysis.trajectory import get_clone_trajectory click.rich_click.SHOW_ARGUMENTS = True click.rich_click.USE_MARKDOWN = True From d14d2884221131e8da77d30d2e341e86f3c5f29d Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Fri, 28 Mar 2025 16:50:20 -0400 Subject: [PATCH 17/32] chore(workflows): lineage fate correlation cache `2024.8.15.7` Signed-off-by: Cameron Smith --- src/pyrovelocity/workflows/main_workflow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyrovelocity/workflows/main_workflow.py b/src/pyrovelocity/workflows/main_workflow.py index b77dc5d70..6bbc354fa 100644 --- a/src/pyrovelocity/workflows/main_workflow.py +++ b/src/pyrovelocity/workflows/main_workflow.py @@ -86,7 +86,7 @@ POSTPROCESS_CACHE_VERSION = f"{CACHE_VERSION}.2" SUMMARIZE_CACHE_VERSION = f"{CACHE_VERSION}.3" UPLOAD_CACHE_VERSION = f"{CACHE_VERSION}.7" -LINEAGE_FATE_CORRELATION_CACHE_VERSION = f"{CACHE_VERSION}.6" 
+LINEAGE_FATE_CORRELATION_CACHE_VERSION = f"{CACHE_VERSION}.7" COMBINE_METRICS_CACHE_VERSION = f"{CACHE_VERSION}.5" DEFAULT_ACCELERATOR_TYPE: GPUAccelerator = T4 From 9fcd1affd2baf7b1f0402b99c20814caa530c337 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Fri, 28 Mar 2025 16:51:11 -0400 Subject: [PATCH 18/32] fix(make): support cleaning up workflow artifacts for multiple data sets Signed-off-by: Cameron Smith --- Makefile | 31 +++++++++++++++++++++++++------ 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/Makefile b/Makefile index a27242687..b71e7ead3 100644 --- a/Makefile +++ b/Makefile @@ -228,13 +228,32 @@ run-async: ## Run registered workflow (async). run-check-image: ## Check workflow image exists. crane ls $(WORKFLOW_IMAGE) | grep "$(GIT_REF)\|$(GIT_SHA)\|$(GIT_SHA_SHORT)" -TEST_DATASET_NAME ?= simulated +TEST_DATASET_NAMES ?= simulated larry larry_neu larry_mono larry_multilineage -workflow-clean-outputs: ## Clean testing dataset and model outputs. make workflow-clean-outputs TEST_DATASET_NAME=simulated. - rm data/external/$(TEST_DATASET_NAME).h5ad || true - rm data/processed/$(TEST_DATASET_NAME)_* || true - rm -r models/$(TEST_DATASET_NAME)_model* || true - rm -r reports/$(TEST_DATASET_NAME)_model* || true +# Function to clean a single dataset +define clean-dataset + @echo "Cleaning outputs for $(1)..." + rm -f data/processed/$(1)_* || true + rm -rf models/$(1)_model* || true + rm -rf reports/$(1)_model* || true + +endef + +# Function to print external dataset remove command +define print-rm-external + @echo " rm -f data/external/$(1).h5ad" + +endef + +workflow-clean-outputs: ## Clean testing dataset and model outputs. make workflow-clean-outputs TEST_DATASET_NAMES="simulated pancreas" + @echo "Cleaning outputs for datasets: $(TEST_DATASET_NAMES)" + $(foreach dataset,$(TEST_DATASET_NAMES),$(call clean-dataset,$(dataset))) + @echo "" + @echo "To remove external datasets, run one of these commands:" + $(foreach dataset,$(TEST_DATASET_NAMES),$(call print-rm-external,$(dataset))) + @echo "" + @echo "Or remove all with:" + @echo " rm -f data/external/{$(shell echo $(TEST_DATASET_NAMES) | tr ' ' ',')}.h5ad" workflow-clear-cache: ## Clear local cache db from ~/.flyte/local-cache/cache.db. 
pyflyte local-cache clear From 5693fe18e3cf5d470087b574703c46ad790599bf Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Sun, 30 Mar 2025 00:52:16 -0400 Subject: [PATCH 19/32] chore(archive): add larry data assembly archive Signed-off-by: Cameron Smith --- .../figures/archive/larry_klein_clone.py | 735 ++++++++++++++++++ 1 file changed, 735 insertions(+) create mode 100644 reproducibility/figures/archive/larry_klein_clone.py diff --git a/reproducibility/figures/archive/larry_klein_clone.py b/reproducibility/figures/archive/larry_klein_clone.py new file mode 100644 index 000000000..4360e594e --- /dev/null +++ b/reproducibility/figures/archive/larry_klein_clone.py @@ -0,0 +1,735 @@ +#!/usr/bin/env python +# coding: utf-8 + +# In[1]: + + +from glob import glob +import numpy as np +from scipy.sparse import load_npz +from scipy.io import mmwrite, mmread +import scanpy as sc +import pandas as pd +import scvelo as scv +import anndata +from glob import glob +import re +import scipy + + +# In[2]: + + +all_data = glob('/data/pinello/PROJECTS/2019_11_ResidualVelocity/data/LARRY/klein_data/*') + + +# In[3]: + + +all_data + + +# In[24]: + + +counts_invivo = load_npz(all_data[-8]) +genes_invivo = pd.read_table(all_data[-6], + header=None) +clone_invivo = load_npz(all_data[-11]) +cell_meta_invivo = pd.read_table('/data/pinello/PROJECTS/2019_11_ResidualVelocity/data/LARRY/klein_data/cell_metadata_in_vivo.txt') +coordinate_invivo = pd.read_table('/data/pinello/PROJECTS/2019_11_ResidualVelocity/data/LARRY/klein_data/coordinates_in_vivo.txt', + header=None) + +invivo = sc.AnnData(counts_invivo, obs=cell_meta_invivo, var=genes_invivo) +invivo_clone = sc.AnnData(clone_invivo, cell_meta_invivo) +# sc.pp.filter_cells(invivo, min_genes=200) +# sc.pp.filter_genes(invivo, min_cells=3)w +invivo_clone = invivo_clone[invivo.obs.index,] + + +# In[25]: + + +embedding = np.concatenate([coordinate_invivo.loc[:(coordinate_invivo.shape[0]//2-1), :].values, + coordinate_invivo.loc[coordinate_invivo.shape[0]//2:, :].values], axis=1) +invivo.obsm['X_umap'] = embedding + + +# In[35]: + + +np.where(invivo_clone[:, c].X.toarray() == 1) + + +# In[37]: + + +# mito_genes = invivo.var_names.str.startswith('mt-') +# # for each cell compute fraction of counts in mito genes vs. 
all genes +# # the `.A1` is only necessary as X is sparse (to transform to a dense array after summing) +# invivo.obs['percent_mito'] = np.sum( +# invivo[:, mito_genes].X, axis=1).A1 / np.sum(invivo.X, axis=1).A1 +# # add the total counts per cell as observations-annotation to adata +# invivo.obs['n_counts'] = invivo.X.sum(axis=1).A1 +# sc.pl.violin(invivo, ['n_genes', 'n_counts', 'percent_mito'], +# jitter=0.4, multi_panel=True) + +# invivo = invivo[invivo.obs.n_genes < 5000, :] +# invivo = invivo[invivo.obs.percent_mito < 0.05, :] +# invivo_clone = invivo_clone[invivo.obs.index,] +# sc.pp.normalize_total(invivo, target_sum=1e4) +# sc.pp.log1p(invivo) +# invivo.raw = invivo +# sc.pp.highly_variable_genes(invivo, min_mean=0.0125, max_mean=3, min_disp=0.5) +# invivo = invivo[:, invivo.var.highly_variable] +# # sc.pp.regress_out(invivo, ['n_counts']) +# # sc.pp.scale(invivo, max_value=10) +# sc.tl.pca(invivo, svd_solver='arpack') +# sc.pp.neighbors(invivo, n_neighbors=10, n_pcs=40) +# sc.tl.umap(invivo) + +clone_invivodict = {} +for c in range(invivo_clone.shape[1]): + # for each clone number +# print(c, invivo_clone[:, 1].X.toarray()) + cells_clone, = np.where(invivo_clone[:, c].X.toarray()[:, 0] == 1) + if len(cells_clone) > 0: + clone_invivodict[c] = invivo_clone.obs.index[cells_clone] + + +# In[38]: + + +invivo.obs.loc[:, 'clone_demo'] = 0 +invivo.obs.loc[clone_invivodict[55], 'clone_demo'] = 1 +invivo.obs.loc[:, 'Time point'] = invivo.obs.loc[:, 'Time point'].astype('int').astype('category') +invivo.obs.loc[:, 'clone_demo'] = invivo.obs.loc[:, 'clone_demo'].astype('category') +invivo.obs.loc[:, 'Time point'].cat.categories +import matplotlib.pyplot as plt +fig, ax = plt.subplots(1, 2) +fig.set_size_inches((24, 12)) +sc.pl.umap(invivo, color='Annotation', ax=ax[0],show=False, + legend_loc='on data', title='', frameon=False) +sc.pl.umap(invivo, color='Time point', ax=ax[1], show=False, wspace=0.2, legend_fontsize=32, + palette=['tab:orange', 'tab:red', 'tab:blue'], + legend_loc='right', title='', frameon=False) +sc.pl.umap(invivo[invivo.obs.clone_demo==1,:], color='Time point', ax=ax[1], show=False, size=120, edges_width=0.5, + legend_fontsize=32, + palette=['tab:orange', 'tab:red', 'tab:blue'], + legend_loc='on data', title='', frameon=False) +# for s, e in list(zip(invitro.obs.loc[:, 'Time point'].cat.categories[:-1], sc.plotting._utils._tmp_cluster_pos[1:])): +for s, e in zip(sc.plotting._utils._tmp_cluster_pos[:-1], sc.plotting._utils._tmp_cluster_pos[1:]): + print(s, e) + ax[1].arrow(s[0], s[1], (e[0]-s[0])*0.8, (e[1]-s[1])*0.8, head_width=50, head_length=60, fc='k', ec='k') + + +# In[14]: + + +invivo.obs.loc[:, 'clone_demo'] = 0 +invivo.obs.loc[clone_invivodict[640], 'clone_demo'] = 1 +invivo.obs.loc[:, 'Time point'] = invivo.obs.loc[:, 'Time point'].astype('int').astype('category') +invivo.obs.loc[:, 'clone_demo'] = invivo.obs.loc[:, 'clone_demo'].astype('category') +invivo.obs.loc[:, 'Time point'].cat.categories +import matplotlib.pyplot as plt +fig, ax = plt.subplots(1, 2) +fig.set_size_inches((24, 12)) +sc.pl.umap(invivo, color='Annotation', ax=ax[0],show=False, + legend_loc='on data', title='', frameon=False) +sc.pl.umap(invivo, color='Time point', ax=ax[1], show=False, wspace=0.2, legend_fontsize=32, + palette=['tab:orange', 'tab:red', 'tab:blue'], + legend_loc='right', title='', frameon=False) +sc.pl.umap(invivo[invivo.obs.clone_demo==1,:], color='Time point', ax=ax[1], show=False, size=120, edges_width=0.5, + legend_fontsize=32, + palette=['tab:orange', 'tab:red', 
'tab:blue'], + legend_loc='on data', title='', frameon=False) + +# for s, e in list(zip(invitro.obs.loc[:, 'Time point'].cat.categories[:-1], sc.plotting._utils._tmp_cluster_pos[1:])): +for s, e in zip(sc.plotting._utils._tmp_cluster_pos[:-1], sc.plotting._utils._tmp_cluster_pos[1:]): + print(s, e) + ax[1].arrow(s[0], s[1], (e[0]-s[0])*0.8, (e[1]-s[1])*0.8, head_width=50, head_length=60, fc='k', ec='k') + + +# In[15]: + + +fig + + +# In[36]: + + +counts_invitro = load_npz(all_data[6]) +genes_invitro = pd.read_table(all_data[-2], + header=None) +clone_invitro = load_npz(all_data[2]) +cell_meta_invitro = pd.read_table('/data/pinello/PROJECTS/2019_11_ResidualVelocity/data/LARRY/klein_data/cell_metadata_in_vitro.txt') +coordinate_invitro = pd.read_table('/data/pinello/PROJECTS/2019_11_ResidualVelocity/data/LARRY/klein_data/coordinates_in_vitro.txt', + header=None) +invitro = sc.AnnData(counts_invitro, obs=cell_meta_invitro, var=genes_invitro) +invitro_clone = sc.AnnData(clone_invitro, cell_meta_invitro) +# sc.pp.filter_cells(invitro, min_genes=200) +# sc.pp.filter_genes(invitro, min_cells=3) +invitro_clone = invitro_clone[invitro.obs.index,] +embedding = np.concatenate([coordinate_invitro.loc[:(coordinate_invitro.shape[0]//2-1), :].values, + coordinate_invitro.loc[coordinate_invitro.shape[0]//2:, :].values], axis=1) +invitro.obsm['X_umap'] = embedding + + +# In[41]: + + +get_ipython().system('head /data/pinello/PROJECTS/2019_11_ResidualVelocity/data/LARRY/klein_data/coordinates_in_vitro.txt') + + +# In[290]: + + +# mito_genes = invitro.var_names.str.startswith('mt-') +# # for each cell compute fraction of counts in mito genes vs. all genes +# # the `.A1` is only necessary as X is sparse (to transform to a dense array after summing) +# invitro.obs['percent_mito'] = np.sum( +# invitro[:, mito_genes].X, axis=1).A1 / np.sum(invitro.X, axis=1).A1 +# # add the total counts per cell as observations-annotation to adata +# invitro.obs['n_counts'] = invitro.X.sum(axis=1).A1 +# sc.pl.violin(invitro, ['n_genes', 'n_counts', 'percent_mito'], +# jitter=0.4, multi_panel=True) +# invitro = invitro[invitro.obs.n_genes < 5000, :] +# invitro = invitro[invitro.obs.percent_mito < 0.05, :] +# invitro_clone = invitro_clone[invitro.obs.index,] +# ## sc.pp.normalize_total(invitro, target_sum=1e4) ## weird errors +# sc.pp.log1p(invitro) +# invitro.raw = invitro +# sc.pp.highly_variable_genes(invitro, min_mean=0.0125, max_mean=3, min_disp=0.5) +# invitro = invitro[:, invitro.var.highly_variable] +# # sc.pp.regress_out(invivo, ['n_counts']) +# # sc.pp.scale(invivo, max_value=10) +# sc.tl.pca(invitro, svd_solver='arpack') +# sc.pp.neighbors(invitro, n_neighbors=50, n_pcs=50) +# sc.tl.umap(invitro) + + +# In[17]: + + +clone_dict = {} +for c in range(invitro_clone.shape[1]): + # for each clone number + cells_clone, = np.where(invitro_clone[:, c].X == 1) + if len(cells_clone) > 0: + clone_dict[c] = invitro_clone.obs.index[cells_clone] + + +# In[19]: + + +invitro_clone.obs.head() + + +# In[20]: + + +invitro.obs.loc[:, 'clone_demo'] = 0 +invitro.obs.loc[clone_dict[922], 'clone_demo'] = 1 +invitro.obs.loc[:, 'Time point'] = invitro.obs.loc[:, 'Time point'].astype('int').astype('category') +invitro.obs.loc[:, 'clone_demo'] = invitro.obs.loc[:, 'clone_demo'].astype('category') +invitro.obs.loc[:, 'Well'] = invitro.obs.loc[:, 'Well'].astype('category') +invitro.obs.loc[:, 'Time point'].cat.categories +import matplotlib.pyplot as plt +fig, ax = plt.subplots(1, 3) +fig.set_size_inches((30, 12)) +sc.pl.umap(invitro, 
color=['Annotation'], ax=ax[0], show=False, + legend_fontsize=32, + legend_loc='on data', title='', frameon=False) +sc.pl.umap(invitro, color=['Well'], ax=ax[1], show=False, + palette=['tab:orange', 'tab:purple', 'tab:blue'], + legend_fontsize=32, + legend_loc='on data', title='', frameon=False) +sc.pl.umap(invitro, color='Time point', ax=ax[2], show=False, wspace=0.2, legend_fontsize=32, + palette=['tab:orange', 'tab:red', 'tab:blue'], + legend_loc='right', title='', frameon=False) +sc.pl.umap(invitro[invitro.obs.clone_demo==1,:], color='Time point', ax=ax[2], show=False, size=120, edges_width=0.5, + legend_fontsize=32, + palette=['tab:orange', 'tab:red', 'tab:blue'], + legend_loc='on data', title='', frameon=False) + +# for s, e in list(zip(invitro.obs.loc[:, 'Time point'].cat.categories[:-1], sc.plotting._utils._tmp_cluster_pos[1:])): +for s, e in zip(sc.plotting._utils._tmp_cluster_pos[:-1], sc.plotting._utils._tmp_cluster_pos[1:]): + print(s, e) + ax[2].arrow(s[0], s[1], (e[0]-s[0])*0.8, (e[1]-s[1])*0.8, head_width=50, head_length=50, fc='k', ec='k') + + +# In[21]: + + +fig + + +# In[23]: + + +invitro.obs.loc[:, 'clone_demo'] = 0 +invitro.obs.loc[clone_dict[4300], 'clone_demo'] = 1 +invitro.obs.loc[:, 'Time point'] = invitro.obs.loc[:, 'Time point'].astype('int').astype('category') +invitro.obs.loc[:, 'clone_demo'] = invitro.obs.loc[:, 'clone_demo'].astype('category') +invitro.obs.loc[:, 'Time point'].cat.categories +import matplotlib.pyplot as plt +fig, ax = plt.subplots(1, 3) +fig.set_size_inches((24, 12)) +sc.pl.umap(invitro, color='Annotation', ax=ax[0],show=False, + legend_loc='on data', title='', frameon=False) +sc.pl.umap(invitro, color='Time point', ax=ax[1], show=False, wspace=0.2, legend_fontsize=32, + palette=['tab:orange', 'tab:red', 'tab:blue'], + legend_loc='right', title='', frameon=False) +sc.pl.umap(invitro[invitro.obs.clone_demo==1,:], color='Time point', ax=ax[2], show=False, size=120, edges_width=0.5, + legend_fontsize=32, + palette=['tab:orange', 'tab:red', 'tab:blue'], + legend_loc='on data', title='', frameon=False) + +# for s, e in list(zip(invitro.obs.loc[:, 'Time point'].cat.categories[:-1], sc.plotting._utils._tmp_cluster_pos[1:])): +for s, e in zip(sc.plotting._utils._tmp_cluster_pos[:-1], sc.plotting._utils._tmp_cluster_pos[1:]): + print(s, e) + ax[2].arrow(s[0], s[1], (e[0]-s[0])*0.8, (e[1]-s[1])*0.8, head_width=50, head_length=50, fc='k', ec='k') + + +# In[24]: + + +fig + + +# In[25]: + + +invitro.obs.loc[:, 'clone_demo'] = 0 +invitro.obs.loc[clone_dict[846], 'clone_demo'] = 1 +invitro.obs.loc[:, 'Time point'] = invitro.obs.loc[:, 'Time point'].astype('int').astype('category') +invitro.obs.loc[:, 'clone_demo'] = invitro.obs.loc[:, 'clone_demo'].astype('category') +invitro.obs.loc[:, 'Time point'].cat.categories +import matplotlib.pyplot as plt +fig, ax = plt.subplots(1, 3) +fig.set_size_inches((24, 12)) +sc.pl.umap(invitro, color='Annotation', ax=ax[0],show=False, + legend_loc='on data', title='', frameon=False) +sc.pl.umap(invitro, color='Time point', ax=ax[1], show=False, wspace=0.2, legend_fontsize=32, + palette=['tab:orange', 'tab:red', 'tab:blue'], + legend_loc='right', title='', frameon=False) +sc.pl.umap(invitro[invitro.obs.clone_demo==1,:], color='Time point', ax=ax[1], show=False, size=120, edges_width=0.5, + legend_fontsize=32, + palette=['tab:orange', 'tab:red', 'tab:blue'], + legend_loc='on data', title='', frameon=False) + +# for s, e in list(zip(invitro.obs.loc[:, 'Time point'].cat.categories[:-1], sc.plotting._utils._tmp_cluster_pos[1:])): 
+for s, e in zip(sc.plotting._utils._tmp_cluster_pos[:-1], sc.plotting._utils._tmp_cluster_pos[1:]): + print(s, e) + ax[1].arrow(s[0], s[1], (e[0]-s[0])*0.8, (e[1]-s[1])*0.8, head_width=50, head_length=50, fc='k', ec='k') + + +# In[27]: + + +invitro.obs.loc[:, 'clone_demo'] = 0 +invitro.obs.loc[clone_dict[851], 'clone_demo'] = 1 +invitro.obs.loc[:, 'Time point'] = invitro.obs.loc[:, 'Time point'].astype('int').astype('category') +invitro.obs.loc[:, 'clone_demo'] = invitro.obs.loc[:, 'clone_demo'].astype('category') +invitro.obs.loc[:, 'Time point'].cat.categories +import matplotlib.pyplot as plt +fig, ax = plt.subplots(1, 2) +fig.set_size_inches((24, 12)) +sc.pl.umap(invitro, color='Annotation', ax=ax[0],show=False, + legend_loc='on data', title='', frameon=False) +sc.pl.umap(invitro, color='Time point', ax=ax[1], show=False, wspace=0.2, legend_fontsize=32, + palette=['tab:orange', 'tab:red', 'tab:blue'], + legend_loc='right', title='', frameon=False) +sc.pl.umap(invitro[invitro.obs.clone_demo==1,:], color='Time point', ax=ax[1], show=False, size=120, edges_width=0.5, + legend_fontsize=32, + palette=['tab:orange', 'tab:red', 'tab:blue'], + legend_loc='on data', title='', frameon=False) + +# for s, e in list(zip(invitro.obs.loc[:, 'Time point'].cat.categories[:-1], sc.plotting._utils._tmp_cluster_pos[1:])): +for s, e in zip(sc.plotting._utils._tmp_cluster_pos[:-1], sc.plotting._utils._tmp_cluster_pos[1:]): + print(s, e) + ax[1].arrow(s[0], s[1], (e[0]-s[0])*0.8, (e[1]-s[1])*0.8, head_width=50, head_length=50, fc='k', ec='k') + + +# In[28]: + + +fig + + +# In[29]: + + +invitro.obs.loc[:, 'clone_demo'] = 0 +invitro.obs.loc[clone_dict[408], 'clone_demo'] = 1 +invitro.obs.loc[:, 'Time point'] = invitro.obs.loc[:, 'Time point'].astype('int').astype('category') +invitro.obs.loc[:, 'clone_demo'] = invitro.obs.loc[:, 'clone_demo'].astype('category') +invitro.obs.loc[:, 'Time point'].cat.categories +import matplotlib.pyplot as plt +fig, ax = plt.subplots(1, 2) +fig.set_size_inches((24, 12)) +sc.pl.umap(invitro, color='Annotation', ax=ax[0],show=False, + legend_loc='on data', title='', frameon=False) +sc.pl.umap(invitro, color='Time point', ax=ax[1], show=False, wspace=0.2, legend_fontsize=32, + palette=['tab:orange', 'tab:red', 'tab:blue'], + legend_loc='right', title='', frameon=False) +sc.pl.umap(invitro[invitro.obs.clone_demo==1,:], color='Time point', ax=ax[1], show=False, size=120, edges_width=0.5, + legend_fontsize=32, + palette=['tab:orange', 'tab:red', 'tab:blue'], + legend_loc='on data', title='', frameon=False) + +# for s, e in list(zip(invitro.obs.loc[:, 'Time point'].cat.categories[:-1], sc.plotting._utils._tmp_cluster_pos[1:])): +for s, e in zip(sc.plotting._utils._tmp_cluster_pos[:-1], sc.plotting._utils._tmp_cluster_pos[1:]): + print(s, e) + ax[1].arrow(s[0], s[1], (e[0]-s[0])*0.8, (e[1]-s[1])*0.8, head_width=50, head_length=50, fc='k', ec='k') + + +# In[30]: + + +fig + + +# # mapping LARRY loom with the clone information + +# In[2]: + + +larry_invitros = [] +for i in glob('../../data/LARRY/*loom'): + larry_invitros.append(scv.read(i)) +larry_invitro_adata = larry_invitros[0].concatenate(*larry_invitros[1:]) + + +# In[3]: + + +n = 0 +for i in larry_invitros: + n += i.shape[0] +print(n) + + +# In[41]: + + +metadata = pd.read_table('../../data/LARRY/GSM4185642_stateFate_inVitro_metadata.txt') + + +# In[42]: + + +clone_invitro = scipy.io.mmread('../../data/LARRY/GSM4185642_stateFate_inVitro_clone_matrix.mtx') + + +# In[43]: + + +clone_invitro = clone_invitro.tocsr() + + +# In[44]: + + 
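+# Quick consistency check: the metadata table (one row per cell) and the cell-by-clone
+# matrix are expected to describe the same cells, so their row counts should agree
+# before the barcode alignment performed below.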
+metadata.shape, clone_invitro.shape + + +# In[45]: + + +cells = pd.read_table('../../data/LARRY/filtered_cell_barcodes.txt', header=None) + + +# In[46]: + + +librarys = pd.read_table('../../data/LARRY/filtered_library_names.txt', header=None) + + +# In[47]: + + +librarys[:5] + + +# In[48]: + + +genes_invitro = pd.read_table('../../data/LARRY/GSM4185642_stateFate_inVitro_gene_names.txt', + header=None) + + +# In[49]: + + +genes_invitro.columns = ['gene'] +genes_invitro.index = genes_invitro.gene + + +# In[50]: + + +cell_meta_invitro = pd.read_table('/data/pinello/PROJECTS/2019_11_ResidualVelocity/data/LARRY/GSM4185642_stateFate_inVitro_cell_barcodes.txt', + header=None) + + +# In[51]: + + +cell_meta_invitro.columns = ['barcode'] + + +# In[52]: + + +cell_meta_invitro.index = cell_meta_invitro.barcode + + +# In[53]: + + +cell_meta_invitro.head() + + +# In[54]: + + +filter_barcodes = pd.concat([librarys, + cells.iloc[:, 0].map(lambda x: x.replace('-', ''))], axis=1) + + +# In[55]: + + +filter_barcodes.head() + + +# In[56]: + + +filter_barcodes = filter_barcodes.apply(lambda x: ':'.join(x), axis=1).values + + +# In[57]: + + +larry_invitro_adata.obs.head() + + +# In[58]: + + +filter_barcodes[:5] + + +# In[59]: + + +metadata.head() + + +# In[60]: + + +np.intersect1d(larry_invitro_adata.obs.index.map(lambda x:re.sub('x-.*', '', x)), + filter_barcodes).shape + + +# In[61]: + + +larry_invitro_adata_sub = larry_invitro_adata[larry_invitro_adata.obs.index.map(lambda x:re.sub('x-.*', + '', x)).isin(filter_barcodes)] + + +# In[62]: + + +larry_invitro_adata_sub.obs.head() + + +# In[63]: + + +def match(x, y): + """return y index that match the x iterms""" + ind_dict = {} + for i, j in enumerate(y): + ind_dict[j] = i + inds = [] + for i in x: + inds.append(ind_dict[i]) + return np.array(inds) + + +# In[64]: + + +(metadata.loc[:, ['Library', 'Cell barcode']].apply(lambda x: '%s:%s' %(x[0], x[1].replace('-', '')), axis=1)==filter_barcodes).sum() + + +# In[67]: + + +larry_invitro_adata_sub.obs.head() + + +# In[70]: + + +larry_invitro_adata_sub.obs.index.map(lambda x:re.sub('x-.*', '', x)) + + +# In[68]: + + +filter_barcodes[:10] + + +# In[71]: + + +larry_invitro_adata_sub = larry_invitro_adata_sub[match(filter_barcodes, + larry_invitro_adata_sub.obs.index.map(lambda x:re.sub('x-.*', '', x))), :] + + +# In[72]: + + +larry_invitro_adata_sub.obs = metadata +larry_invitro_adata_sub.obs.index = filter_barcodes + + +# In[74]: + + +metadata.head() + + +# In[75]: + + +larry_invitro_adata_sub.obs.head() + + +# In[76]: + + +invitro_clone = sc.AnnData(clone_invitro, obs=metadata) + + +# In[78]: + + +invitro_clone.obs.index = filter_barcodes + + +# In[380]: + + +larry_invitro_adata.write('all_invitro_loom.h5ad') + + +# In[ ]: + + +larry_invitro_adata_sub.write('sub_invitro_loom.h5ad') + + +# In[ ]: + + +invitro_clone.write('all_invitro_loom_clone.h5ad') + + +# In[385]: + + +invitro_clone.shape + + +# In[403]: + + +from resvel import preprocess +from resvel.stat import run_velocity +import scanpy as sc + + +# In[394]: + + +larry_invitro_adata_sub.obsm['X_spring'] = larry_invitro_adata_sub.obs.loc[:, ['SPRING-x', 'SPRING-y']] .values +larry_invitro_adata_sub.obsm['spring'] = larry_invitro_adata_sub.obs.loc[:, ['SPRING-x', 'SPRING-y']] .values +larry_invitro_adata_sub.obsm['X_umap'] = larry_invitro_adata_sub.obs.loc[:, ['SPRING-x', 'SPRING-y']] .values + + +# In[402]: + + +larry_invitro_adata_sub.obs.head() + + +# In[400]: + + +fig, ax = plt.subplots() +sc.pl.embedding(larry_invitro_adata_sub, basis='umap', show=False, 
ax=ax) +fig + + +# In[401]: + + +larry_invitro_adata_sub = preprocess(larry_invitro_adata_sub) + + +# In[404]: + + +larry_invitro_adata_sub = run_velocity(larry_invitro_adata_sub, mode='dynamical') + + +# In[405]: + + +#larry_invitro_adata_sub.write('sub_invitro_loom_dynamical_velocity.h5ad') + + +# In[406]: + + +get_ipython().system('du -sh sub_invitro_loom_dynamical_velocity.h5ad') + + +# In[414]: + + +fig, ax = plt.subplots(2, 1) +fig.set_size_inches(10, 22) +scv.pl.velocity_embedding_stream(larry_invitro_adata_sub, color='Cell type annotation', + ax=ax[0], show=False) # scale=0.5, arrow_size=2.5 +scv.pl.scatter(larry_invitro_adata_sub, color='latent_time', show=False, + ax=ax[1]) +plt.tight_layout(pad=0) +fig + + +# In[415]: + + +fig.savefig('larry_invitro.png') + + +# In[416]: + + +get_ipython().system('ls larry_invitro.png') + + +# In[ ]: + + + + From deb5fcaeb7037b3bdd4505983f305a897904f250 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Sun, 30 Mar 2025 00:52:32 -0400 Subject: [PATCH 20/32] chore(archive): add larry data subpopulation extraction archive Signed-off-by: Cameron Smith --- .../larry_clonal_subpopulation_extraction.py | 555 ++++++++++++++++++ 1 file changed, 555 insertions(+) create mode 100644 reproducibility/figures/archive/larry_clonal_subpopulation_extraction.py diff --git a/reproducibility/figures/archive/larry_clonal_subpopulation_extraction.py b/reproducibility/figures/archive/larry_clonal_subpopulation_extraction.py new file mode 100644 index 000000000..1d9719da3 --- /dev/null +++ b/reproducibility/figures/archive/larry_clonal_subpopulation_extraction.py @@ -0,0 +1,555 @@ +# --- +# jupyter: +# jupytext: +# text_representation: +# extension: .py +# format_name: percent +# format_version: '1.3' +# jupytext_version: 1.16.7 +# kernelspec: +# display_name: Python 3 +# language: python +# name: python3 +# --- + +# %% [markdown] +# # Gold standard for global + +# %% +# %load_ext autoreload +# %autoreload 2 + +# %% +import scvelo as scv +from scvelo.datasets import simulation +import numpy as np +import matplotlib.pyplot as plt +from dynamical_velocity2 import PyroVelocity +from dynamical_velocity2.data import load_data +from scipy.stats import pearsonr, spearmanr +import seaborn as sns +from dynamical_velocity2.data import load_data +import cospar as cs +import numpy as np +import scvelo as scv +cs.logging.print_version() +cs.settings.verbosity=2 +cs.settings.data_path='LARRY_data' # A relative path to save data. If not existed before, create a new one. +cs.settings.figure_path='LARRY_figure' # A relative path to save figures. If not existed before, create a new one. 
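+# Archived settings cell: cospar reads and writes its cached objects and figures in the
+# relative paths configured above. The cells that follow rebuild the clone-trajectory
+# "gold standard" vectors used to compare cospar transition maps and scvelo velocities
+# on the LARRY in vitro embedding.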
+cs.settings.set_figure_params(format='png',figsize=[4,3.5],dpi=75,fontsize=14,pointsize=2) +# !mkdir -p LARRY_figure +import scvelo as scv +from scvelo.datasets import simulation +import numpy as np +import matplotlib.pyplot as plt +from dynamical_velocity2 import PyroVelocity +from dynamical_velocity2.data import load_data +from scipy.stats import pearsonr, spearmanr +import seaborn as sns +from dynamical_velocity2.data import load_data + +# %% +from scipy.sparse import issparse + +# %% +#adata_processed = scv.read("../notebooks/larry_invitro_adata_sub_withvelocitycospar.h5ad") +adata_processed = scv.read("../notebooks/larry_invitro_adata_with_scvelo_dynamicalvelocity.h5ad") + +# %% [markdown] +# # Check processed scvelo adata + +# %% +scv.tl.velocity_embedding(adata_processed, vkey='velocity', basis='emb') +scv.pl.velocity_embedding_grid(adata_processed, figsize=(15, 15), scale=0.3, basis='emb', color='state_info') + +# %% +adata_processed.obs.state_info.cat.categories + +# %% +adata_processed.var.velocity_genes.sum(), adata_processed.shape + +# %% [markdown] +# # Run old Normal likelihood model + +# %% +adata = scv.read("../notebooks/larry_invitro_adata_sub_raw.h5ad") +adata_processed.layers['raw_spliced'] = adata[:, adata_processed.var_names].layers['spliced'] +adata_processed.layers['raw_unspliced'] = adata[:, adata_processed.var_names].layers['unspliced'] + +# %% +adata.shape, adata_processed.shape + +# %% +adata_processed.obs.time_info.unique() + +# %% +adata_cospar = scv.read("../notebooks/LARRY_data/LARRY_MultiTimeClone_Later_FullSpace0_t*2.0*4.0*6_adata_with_transition_map.h5ad") + +# %% +adata_cytotrace = scv.read("../notebooks/larry_invitro_adata_sub_raw_withcytotrace.h5ad") + +# %% +cs.pl.fate_bias(adata_cospar, selected_fates=['Neutrophil','Monocyte'],used_Tmap='transition_map', + selected_times=[2,4], plot_target_state=False,map_backward=True,sum_fate_prob_thresh=0.1) + +# %% +import anndata + +def get_clone_trajectory(adata, average_start_point=True, global_traj=True, times=[2, 4, 6], clone_num=None): + if not average_start_point: + adata.obsm['clone_vector_emb'] = np.zeros((adata.shape[0], 2)) + + adatas = [] + clones = [] + centroids = [] + cen_clones = [] + print(adata.shape) + adata.obs['clones'] = 0 + if 'noWell' in adata.obs.columns: + for w in adata.obs.Well.unique(): + adata_w = adata[adata.obs.Well==w] + clone_adata_w = clone_adata[clone_adata.obs.Well == w] + for j in range(clone_adata_w.shape[1]): + adata_w.obs['clonei'] = 0 + # belongs to same clone + adata_w.obs.loc[clone_adata_w[:, j].X.toarray()[:, 0] >= 1, 'clonei'] = 1 + + if not average_start_point: + for i in np.where((adata_w.obs.time==2) & (adata_w.obs.clonei==1))[0]: + next_time = np.where((adata_w.obs.time==4) & (adata_w.obs.clonei==1))[0] + adata_w.obsm['velocity_umap'][i] = adata_w.obsm['X_umap'][next_time].mean(axis=0)-adata_w.obsm['X_umap'][i] + for i in np.where((adata_w.obs.time==4) & (adata_w.obs.clonei==1))[0]: + next_time = np.where((adata_w.obs.time==6) & (adata_w.obs.clonei==1))[0] + adata_w.obsm['velocity_umap'][i] = adata_w.obsm['X_umap'][next_time].mean(axis=0)-adata_w.obsm['X_umap'][i] + else: + time2 = np.where((adata_w.obs.time==2) & (adata_w.obs.clonei==1))[0] + time4 = np.where((adata_w.obs.time==4) & (adata_w.obs.clonei==1))[0] + time6 = np.where((adata_w.obs.time==6) & (adata_w.obs.clonei==1))[0] + if time2.shape[0] == 0 and time4.shape[0] == 0 and time6.shape[0] == 0: + continue + if time2.shape[0] > 0 and time4.shape[0] == 0 and time6.shape[0] > 0: + continue + adata_new = 
anndata.AnnData(np.vstack([adata_w[time2].X.toarray().mean(axis=0), + adata_w[time4].X.toarray().mean(axis=0), + adata_w[time6].X.toarray().mean(axis=0)]), + layers={'spliced': np.vstack([adata_w[time2].layers['spliced'].toarray().mean(axis=0), + adata_w[time4].layers['spliced'].toarray().mean(axis=0), + adata_w[time6].layers['spliced'].toarray().mean(axis=0)]), + 'unspliced': np.vstack([adata_w[time2].layers['unspliced'].toarray().mean(axis=0), + adata_w[time4].layers['unspliced'].toarray().mean(axis=0), + adata_w[time6].layers['unspliced'].toarray().mean(axis=0)])}, + var=adata_w.var) + + adata_new.obs.loc[:, 'time'] = [2, 4, 6] + adata_new.obs.loc[:, 'Cell type annotation'] = 'Centroid' + print(adata_w[time6].obs.clonetype.unique()) + print(adata_w[time6].obs) + + adata_new.obs.loc[:, 'clonetype'] = adata_w[time6].obs.clonetype.unique() # use cell fate from last time point + adata_new.obs.loc[:, 'clones'] = int(j) + if 'Well' in adata_w[time6].obs.columns: + adata_new.obs.loc[:, 'Well'] = adata_w[time6].obs.Well.unique() + + adata_new.obsm['X_umap'] = np.vstack([adata_w[time2].obsm['X_umap'].mean(axis=0), + adata_w[time4].obsm['X_umap'].mean(axis=0), + adata_w[time6].obsm['X_umap'].mean(axis=0)]) + adata_new.obsm['velocity_umap'] = np.vstack([adata_w.obsm['X_umap'][time4].mean(axis=0) - adata_w.obsm['X_umap'][time2].mean(axis=0), + adata_w.obsm['X_umap'][time6].mean(axis=0) - adata_w.obsm['X_umap'][time4].mean(axis=0), + np.zeros(2)]) + centroids.append(adata_new) + clone_new = anndata.AnnData(np.vstack([clone_adata_w[time2].X.toarray().mean(axis=0), + clone_adata_w[time4].X.toarray().mean(axis=0), + clone_adata_w[time6].X.toarray().mean(axis=0)]), + obs=adata_new.obs) + clone_new.var_names = clone_adata.var_names + clone_new.var = clone_adata.var + print(clone_new.shape) + cen_clones.append(clone_new) + + adata_new = adata_w.concatenate(centroids[0].concatenate(centroids[1:]), join='outer') + clone_new = clone_adata_w.concatenate(cen_clones[0].concatenate(cen_clones[1:]), join='outer') + adatas.append(adata_new) + clones.append(clone_new) + return adatas[0].concatenate(adatas[1]), clones[0].concatenate(clones[1]) + else: + if clone_num is None: + clone_num = adata.obsm['X_clone'].shape[1] + for j in range(clone_num): + print(j) + adata.obs['clonei'] = 0 + print('----------aa------') + if issparse(adata.obsm['X_clone']): + adata.obs.loc[adata.obsm['X_clone'].toarray()[:, j] >= 1, 'clonei'] = 1 + else: + adata.obs.loc[adata.obsm['X_clone'][:, j] >= 1, 'clonei'] = 1 + print('----------bb------') + + if not average_start_point: + for i in np.where((adata.obs.time==2) & (adata.obs.clonei==1))[0]: + next_time = np.where((adata.obs.time==4) & (adata.obs.clonei==1))[0] + adata.obsm['velocity_umap'][i] = adata.obsm['X_umap'][next_time].mean(axis=0)-adata.obsm['X_umap'][i] + for i in np.where((adata.obs.time==4) & (adata.obs.clonei==1))[0]: + next_time = np.where((adata.obs.time==6) & (adata.obs.clonei==1))[0] + adata.obsm['velocity_umap'][i] = adata.obsm['X_umap'][next_time].mean(axis=0)-adata.obsm['X_umap'][i] + else: + if global_traj: + times_index = [] + for t in times: + times_index.append(np.where((adata.obs.time_info==t) & (adata.obs.clonei==1))[0]) + + consecutive_flag = np.array([int(time.shape[0] > 0) for time in times_index]) + consecutive = np.diff(consecutive_flag) + if np.sum(consecutive_flag == 1) >= 2 and np.any(consecutive == 0): # Must be consecutive time points + print('centroid:', consecutive, times_index) + adata_new = 
anndata.AnnData(np.vstack([np.array(adata[time].X.mean(axis=0)).squeeze() for time in times_index if time.shape[0] > 0]), +# layers={'spliced': +# np.vstack([np.array(adata[time].layers['spliced'].mean(axis=0)) for time in times_index if time.shape[0] > 0]), +# 'unspliced': +# np.vstack([np.array(adata[time].layers['unspliced'].mean(axis=0)) for time in times_index if time.shape[0] > 0]) +# }, + var=adata.var) + print('----------cc------') + adata.obs.iloc[np.hstack([time for time in times_index if time.shape[0] > 0]), adata.obs.columns.get_loc('clones')] = int(j) + adata_new.obs.loc[:, 'time'] = [t for t, time in zip([2, 4, 6], times_index) if time.shape[0] > 0] + adata_new.obs.loc[:, 'clones'] = int(j) + adata_new.obs.loc[:, 'state_info'] = 'Centroid' + adata_new.obsm['X_emb'] = np.vstack([adata[time].obsm['X_emb'].mean(axis=0) + for time in times_index if time.shape[0] > 0]) + print('----------dd------') + + #print(adata_new.shape) + #print(adata_new.obsm['X_umap']) + adata_new.obsm['clone_vector_emb'] = np.vstack([adata_new.obsm['X_emb'][i+1] - adata_new.obsm['X_emb'][i] + for i in range(adata_new.obsm['X_emb'].shape[0]-1)] + [np.zeros(2)]) + print('----------ee------') + print(adata_new.obsm['clone_vector_emb']) + else: + print('pass-------') + continue + + else: + time2 = np.where((adata.obs.time==t) & (adata.obs.clonei==1))[0] + time4 = np.where((adata.obs.time==4) & (adata.obs.clonei==1))[0] + time6 = np.where((adata.obs.time==6) & (adata.obs.clonei==1))[0] + adata_new = anndata.AnnData(np.vstack([adata[time2].X.toarray().mean(axis=0), + adata[time4].X.toarray().mean(axis=0), + adata[time6].X.toarray().mean(axis=0)]), + layers={'spliced': np.vstack([adata[time2].layers['spliced'].toarray().mean(axis=0), + adata[time4].layers['spliced'].toarray().mean(axis=0), + adata[time6].layers['spliced'].toarray().mean(axis=0)]), + 'unspliced': np.vstack([adata[time2].layers['unspliced'].toarray().mean(axis=0), + adata[time4].layers['unspliced'].toarray().mean(axis=0), + adata[time6].layers['unspliced'].toarray().mean(axis=0)])}, + var=adata.var) + + print(adata_new.X.sum(axis=1)) + adata_new.obs.loc[:, 'time'] = [2, 4, 6] + adata_new.obs.loc[:, 'Cell type annotation'] = 'Centroid' + if not global_traj: + adata_new.obs.loc[:, 'clonetype'] = adata[time6].obs.clonetype.unique() # use cell fate from last time point + adata_new.obs.loc[:, 'clones'] = j + + if 'noWell' in adata[time6].obs.columns: + adata_new.obs.loc[:, 'Well'] = adata[time6].obs.Well.unique() + + adata_new.obsm['X_umap'] = np.vstack([adata[time2].obsm['X_umap'].mean(axis=0), + adata[time4].obsm['X_umap'].mean(axis=0), + adata[time6].obsm['X_umap'].mean(axis=0)]) + adata_new.obsm['velocity_umap'] = np.vstack([adata.obsm['X_umap'][time4].mean(axis=0) - adata.obsm['X_umap'][time2].mean(axis=0), + adata.obsm['X_umap'][time6].mean(axis=0) - adata.obsm['X_umap'][time4].mean(axis=0), + np.zeros(2)]) + + print(adata_new.obsm['velocity_umap']) + clone_new = anndata.AnnData(np.vstack([clone_adata[time2].X.toarray().mean(axis=0), + clone_adata[time4].X.toarray().mean(axis=0), + clone_adata[time6].X.toarray().mean(axis=0)]), + obs=adata_new.obs) + clone_new.var_names = clone_adata.var_names + clone_new.var = clone_adata.var + cen_clones.append(clone_new) + centroids.append(adata_new) + print(adata.shape) + print(len(centroids)) + adata_new = adata.concatenate(centroids[0].concatenate(centroids[1:]), join='outer') + return adata_new + + +# %% +adata.obs.head() + +# %% +state_global_test = get_clone_trajectory(adata) + +# %% +# state_global_test 
= scv.read("global_gold_standard2.h5ad") + +# %% +# state_global_test2 = get_clone_trajectory(adata) + +# %% +state_global_test.obsm + +# %% +fig, ax = plt.subplots() +fig.set_size_inches(9, 9) +scv.pl.velocity_embedding_grid(state_global_test, basis='emb', vkey='clone_vector', + arrow_size=3, arrow_color='black', + density=0.5, color='state_info', ax=ax, show=False, + scale=0.3, + legend_loc='right') + +# %% +fig, ax = plt.subplots() +fig.set_size_inches(9, 9) +scv.pl.velocity_embedding_stream(state_global_test, basis='emb', vkey='clone_vector', + arrow_size=3, arrow_color='black', + density=0.8, color='state_info', ax=ax, show=False, + legend_loc='right') + +# %% +cs.pl.fate_potency(adata_processed, used_Tmap='transition_map', + map_backward=True,method='norm-sum',color_bar=True,fate_count=True) + +adata_processed.uns['Tmap_cell_id_t1'].shape, adata_processed.uns['Tmap_cell_id_t2'].shape + +# %% +adata_test = adata_processed.copy() + +# %% +from scipy.sparse import csr_matrix +graph = np.zeros((adata_processed.shape[0], adata_processed.shape[0])) +for index, t1 in enumerate(adata_processed.uns['Tmap_cell_id_t1']): + graph[t1, adata_processed.uns['Tmap_cell_id_t2']] = adata_processed.uns['transition_map'][index].toarray() +adata_test.uns['velocity_graph'] = csr_matrix(graph) +scv.tl.velocity_embedding(adata_test, basis='emb') + +fig, ax = plt.subplots() +fig.set_size_inches(9, 9) +scv.pl.velocity_embedding_grid(adata_test, basis='emb', vkey='velocity', color='state_info',show=False, + scale=0.3, ax=ax, arrow_size=3, arrow_color='red', + density=0.5, + legend_loc='right') + +scv.pl.velocity_embedding_grid(adata_processed, basis='emb', vkey='velocity',color='state_info',show=False, + scale=0.3, ax=ax, arrow_size=3, arrow_color='blue', + density=0.5, + legend_loc='right') + +scv.pl.velocity_embedding_grid(state_global_test, basis='emb', vkey='clone_vector', color='state_info',show=False, + scale=0.3, ax=ax, arrow_size=3, arrow_color='black', + density=0.5, + legend_loc='right') + +# %% +graph = np.zeros((adata_processed.shape[0], adata_processed.shape[0]), dtype=np.uint8) + +# %% +adata_test.uns['velocity_graph'] = csr_matrix(graph) +scv.tl.velocity_embedding(adata_test, basis='emb') + +# %% +fig, ax = plt.subplots() +fig.set_size_inches(9, 9) +scv.pl.velocity_embedding_grid(adata_test, basis='emb', vkey='velocity', color='state_info',show=False, + scale=0.3, ax=ax, arrow_size=3, arrow_color='red', + density=0.5, + legend_loc='right') + +scv.pl.velocity_embedding_grid(adata_processed, basis='emb', vkey='velocity',color='state_info',show=False, + scale=0.3, ax=ax, arrow_size=3, arrow_color='blue', + density=0.5, + legend_loc='right') + +scv.pl.velocity_embedding_grid(state_global_test, basis='emb', vkey='clone_vector', color='state_info',show=False, + scale=0.3, ax=ax, arrow_size=3, arrow_color='black', + density=0.5, + legend_loc='right') + +# %% +from scipy.sparse import csr_matrix +graph = np.zeros((adata_processed.shape[0], adata_processed.shape[0])) +for index, t1 in enumerate(adata_processed.uns['Tmap_cell_id_t1']): + graph[t1, adata_processed.uns['Tmap_cell_id_t2']] = adata_processed.uns['transition_map'][index].toarray() +adata_test.uns['velocity_graph'] = csr_matrix(graph) +scv.tl.velocity_embedding(adata_test, basis='emb') + +# %% +fig, ax = plt.subplots(1, 3) +fig.set_size_inches(18, 5) + +adata_processed.obs['fate_bias'] = np.nan +adata_processed.obs.loc[adata_processed.obs['fate_bias_Neutrophil_Monocyte']!=0.5, 'fate_bias'] = 
adata_processed.obs['fate_bias_Neutrophil_Monocyte'][adata_processed.obs['fate_bias_Neutrophil_Monocyte']!=0.5] + +scv.pl.scatter(adata_processed, basis='emb', color='fate_bias', cmap='RdBu_r', show=False, ax=ax[0]) + +scv.pl.scatter(adata_processed, basis='emb', color='fate_potency', cmap='inferno', show=False, ax=ax[1]) + +scv.pl.velocity_embedding_grid(adata_test, basis='emb', vkey='velocity', color='state_info', show=False, + scale=0.3, ax=ax[2], arrow_size=3, arrow_color='red', + density=0.5, + legend_loc='right') + +scv.pl.velocity_embedding_grid(adata_processed, basis='emb', vkey='velocity',color='state_info',show=False, + scale=0.3, ax=ax[2], arrow_size=3, arrow_color='blue', + density=0.5, + legend_loc='right') + +scv.pl.velocity_embedding_grid(state_global_test, basis='emb', vkey='clone_vector', color='state_info',show=False, + scale=0.3, ax=ax[2], arrow_size=3, arrow_color='black', + density=0.5, + legend_loc='right') +ax[2].legend(bbox_to_anchor=[0.1, -0.03], ncol=5, fontsize=7) + +# %% +fig.savefig("Fig3_goldstandard.pdf", facecolor=fig.get_facecolor(), bbox_inches='tight', edgecolor='none', dpi=300) + +# %% +state_global_test.shape + +# %% +adata_processed.write("bifurcation_all_cells_withfatebias.h5ad") +adata_test.write("global_gold_standard.h5ad") +state_global_test.write("global_gold_standard2.h5ad") + +# %% +# !du -sh *h5ad + +# %% +state_global_test.obs.head() + +# %% +state_global_test.obs.loc[:, 'state_info'].cat.categories + + +# %% [markdown] +# # Local gold standard + +# %% +##choose clones that span three time points and go into the specified branches +def select_clones(df_metadata,df_clones,ratio=1.0,cutoff_timepoints=2, + celltypes=['Neutrophil','Monocyte','Baso','Mast','Meg']): + import pandas as pd + ids = np.where(df_clones) + df_tags = pd.DataFrame(data=ids[1], + index=ids[0],columns=['Tag_0']) + print(df_tags.head()) + clones_selected = list() + clones_truth = pd.DataFrame(columns=['celltype']) + for x in np.sort(df_tags['Tag_0'].unique()): + cells_x = df_tags['Tag_0'][df_tags['Tag_0']==x].index + #the number of spanned timepoints for clone x + n_timepoints_x = len(df_metadata.iloc[cells_x, df_metadata.columns.get_loc('time_info')].unique()) + if(n_timepoints_x>cutoff_timepoints): + #the ratio of cells falling into a specific cell type + cells_x_selected = cells_x[df_metadata.iloc[cells_x, df_metadata.columns.get_loc('time_info')]==6] + list_anno_x = df_metadata.iloc[cells_x_selected, df_metadata.columns.get_loc('state_info')].tolist() + celltype = max(set(list_anno_x), key = list_anno_x.count) + pct_celltype = np.float(list_anno_x.count(celltype))/len(list_anno_x) + + if((celltype in celltypes) and (pct_celltype==ratio)): + clones_selected.append(x) + clones_truth.loc[x,] = celltype + return clones_selected, clones_truth + + +# %% +from scipy.sparse import csr_matrix +clones_selected, clones_truth = select_clones(adata.obs, adata.obsm['X_clone'].toarray(), + celltypes=['Neutrophil','Monocyte']) +adata_processed_filter = adata.copy() +adata_processed_filter.obsm['X_clone'] = adata.obsm['X_clone'].toarray()[:, clones_selected] + +id_cells = np.where(adata_processed_filter.obsm['X_clone'].sum(axis=1)>0)[0] +adata_processed_filter = adata_processed_filter[id_cells, :] +adata_processed_filter_res = get_clone_trajectory(adata_processed_filter, clone_num=None) + +# %% +adata_processed_filter_res.write("bifurcation_unipotent_cells.h5ad") + + +# %% +def plot_a_clone(adata, i, ax): + scv.pl.scatter(adata[adata.obs['clones']!=i, :], color='state_info', basis='emb', + 
alpha=0.6, s=15, ax=ax, show=False, legend_loc='right margin', + ) + scv.pl.scatter(adata[(adata.obs['clones']==i) & (~np.isnan(adata.obs['clones'])), :], + color='time_info', basis='emb', + alpha=0.6, s=200, ax=ax, show=False, legend_loc='right margin', cmap='inferno', + ) + scv.pl.velocity_embedding(adata[adata.obs['clones']==i, :], color='black',linewidth=1,arrow_size=5, + basis='emb', vkey='clone_vector',show=False,ax=ax, s=1, alpha=1, + title=f'clone {i}') + + +# %% +adata_processed_filter_res[adata_processed_filter_res.obs.clones==0].obs + +# %% +ann2 = dict(zip(adata_processed_filter_res.obs.loc[:, 'state_info'].cat.categories, + adata_processed_filter_res.obs.loc[:, 'state_info'].cat.categories)) +ann2['Undifferentiated'] = 'Undiff' +ann2['Ba'] = 'O' +ann2['Er'] = 'O' +ann2['Mk'] = 'O' +ann2['cDC'] = 'O' +ann2['pDC1'] = 'O' +ann2['pDC2'] = 'O' + +# %% +ann2 + +# %% +adata_processed_filter_res.obs['state_info'] = adata_processed_filter_res.obs['state_info'].map(ann2) + +# %% +adata_processed_filter_res.obs.loc[adata_processed_filter_res.obs.clones==257,:] + +# %% +fig, ax = plt.subplots(1, 3) +fig.set_size_inches(15, 4) +plot_a_clone(adata_processed_filter_res, 3, ax[0]) +plot_a_clone(adata_processed_filter_res, 3, ax[1]) +scv.pl.velocity_embedding_grid(adata_processed_filter_res, scale=0.25, color='state_info', show=False, + s=20, density=0.5, arrow_size=2.5, linewidth=1, basis='emb', + vkey='clone_vector', + ax=ax[2], title='Clonal progression', arrow_color='black') +#plot_a_clone(adata_processed, 1, ) + +# %% +from scipy.sparse import csr_matrix +clones_selected, clones_truth = select_clones(adata.obs, adata.obsm['X_clone'].toarray(), + celltypes=['Neutrophil']) +adata_processed_filter = adata.copy() +adata_processed_filter.obsm['X_clone'] = adata.obsm['X_clone'].toarray()[:, clones_selected] + +id_cells = np.where(adata_processed_filter.obsm['X_clone'].sum(axis=1)>0)[0] +adata_processed_filter = adata_processed_filter[id_cells, :] +adata_processed_filter_res = get_clone_trajectory(adata_processed_filter, clone_num=None) + +# %% +adata_processed_filter_res.shape + +# %% +adata_processed_filter_res.write("neu_unipotent_cells.h5ad") + +# %% +from scipy.sparse import csr_matrix +clones_selected, clones_truth = select_clones(adata.obs, adata.obsm['X_clone'].toarray(), + celltypes=['Monocyte']) +adata_processed_filter = adata.copy() +adata_processed_filter.obsm['X_clone'] = adata.obsm['X_clone'].toarray()[:, clones_selected] + +id_cells = np.where(adata_processed_filter.obsm['X_clone'].sum(axis=1)>0)[0] +adata_processed_filter = adata_processed_filter[id_cells, :] +adata_processed_filter_res = get_clone_trajectory(adata_processed_filter, clone_num=None) + +# %% +adata_processed_filter_res.write("mono_unipotent_cells.h5ad") + +# %% +adata_processed_filter_res.shape + +# %% +# !du -sh *h5ad + +# %% From 3bb108bd9d693a5e3e88fae16a13260bebf9315a Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Sun, 30 Mar 2025 00:53:20 -0400 Subject: [PATCH 21/32] chore(analysis): add subpopulation extraction from archive Signed-off-by: Cameron Smith --- src/pyrovelocity/analysis/subpopulation.py | 167 +++++++++++++++++++++ 1 file changed, 167 insertions(+) create mode 100644 src/pyrovelocity/analysis/subpopulation.py diff --git a/src/pyrovelocity/analysis/subpopulation.py b/src/pyrovelocity/analysis/subpopulation.py new file mode 100644 index 000000000..ba11de953 --- /dev/null +++ b/src/pyrovelocity/analysis/subpopulation.py @@ -0,0 +1,167 @@ +""" +Functions for extracting clonal subpopulations from 
datasets with clone information. +""" + +import numpy as np +import pandas as pd +import anndata +from scipy.sparse import issparse +from typing import List, Optional, Tuple +from beartype import beartype + +from pyrovelocity.logging import configure_logging + +logger = configure_logging(__name__) + + +@beartype +def select_clones( + df_metadata: pd.DataFrame, + df_clones: np.ndarray, + ratio: float = 1.0, + cutoff_timepoints: int = 2, + celltypes: List[str] = ["Neutrophil", "Monocyte", "Baso", "Mast", "Meg"], +) -> Tuple[List[int], pd.DataFrame]: + """ + Select clones that span multiple timepoints and differentiate into specified cell + types. + + Args: + df_metadata: DataFrame containing cell metadata + df_clones: Binary matrix of clone assignments + ratio: Minimum ratio of cells in a clone that must be of the same cell type + cutoff_timepoints: Minimum number of timepoints a clone must span + celltypes: List of cell types to consider + + Returns: + Tuple containing: + - List of selected clone indices + - DataFrame mapping clone indices to cell types + """ + ids = np.where(df_clones) + df_tags = pd.DataFrame(data=ids[1], index=ids[0], columns=["Tag_0"]) + + clones_selected = list() + clones_truth = pd.DataFrame(columns=["celltype"]) + + for x in np.sort(df_tags["Tag_0"].unique()): + cells_x = df_tags["Tag_0"][df_tags["Tag_0"] == x].index + n_timepoints_x = len( + df_metadata.iloc[ + cells_x, df_metadata.columns.get_loc("time_info") + ].unique() + ) + + if n_timepoints_x > cutoff_timepoints: + cells_x_selected = cells_x[ + df_metadata.iloc[ + cells_x, df_metadata.columns.get_loc("time_info") + ] + == 6 + ] + list_anno_x = df_metadata.iloc[ + cells_x_selected, df_metadata.columns.get_loc("state_info") + ].tolist() + + if len(list_anno_x) > 0: + celltype = max(set(list_anno_x), key=list_anno_x.count) + pct_celltype = float(list_anno_x.count(celltype)) / len( + list_anno_x + ) + + if (celltype in celltypes) and (pct_celltype >= ratio): + clones_selected.append(int(x)) + clones_truth.loc[x,] = celltype + + return clones_selected, clones_truth + + +@beartype +def extract_clonal_subpopulation( + adata: anndata.AnnData, + cell_type: Optional[str] = None, + cell_types: Optional[List[str]] = None, + ratio: float = 1.0, + cutoff_timepoints: int = 2, +) -> anndata.AnnData: + """ + Extract cells belonging to clones that differentiate into specified cell type(s). 
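+
+    A clone is retained only when it spans more than ``cutoff_timepoints`` distinct
+    time points and the dominant fate among its day-6 cells is one of the requested
+    cell types, accounting for at least ``ratio`` of those cells (see ``select_clones``).
+    All cells belonging to at least one retained clone are returned.
+
+    Example:
+        A minimal sketch, assuming an AnnData with ``obsm["X_clone"]`` plus
+        ``obs["time_info"]`` and ``obs["state_info"]`` as in the LARRY data::
+
+            adata_neu = extract_clonal_subpopulation(adata, cell_type="Neutrophil")
+            adata_multi = extract_clonal_subpopulation(
+                adata, cell_types=["Neutrophil", "Monocyte"]
+            )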
+ + Args: + adata: AnnData object with clone information in .obsm['X_clone'] + cell_type: Single cell type to extract (e.g., 'Neutrophil') + cell_types: List of cell types to extract (e.g., ['Neutrophil', 'Monocyte']) + ratio: Minimum ratio of cells in a clone that must be of the same cell type + cutoff_timepoints: Minimum number of timepoints a clone must span + + Returns: + AnnData object containing only cells from the selected clones + """ + if cell_type is not None and cell_types is not None: + raise ValueError("Specify either cell_type or cell_types, not both") + + if cell_type is not None: + cell_types = [cell_type] + elif cell_types is None: + raise ValueError("Must specify either cell_type or cell_types") + + clone_matrix = adata.obsm["X_clone"] + if issparse(clone_matrix): + clone_matrix = clone_matrix.toarray() + + clones_selected, _ = select_clones( + adata.obs, + clone_matrix, + ratio=ratio, + cutoff_timepoints=cutoff_timepoints, + celltypes=cell_types, + ) + + adata_filtered = adata.copy() + adata_filtered.obsm["X_clone"] = clone_matrix[:, clones_selected] + + # Keep only cells that belong to at least one selected clone + id_cells = np.where(adata_filtered.obsm["X_clone"].sum(axis=1) > 0)[0] + adata_filtered = adata_filtered[id_cells, :] + + logger.info( + f"Extracted {adata_filtered.n_obs} cells from {len(clones_selected)} clones" + ) + + return adata_filtered + + +@beartype +def create_larry_subpopulations( + output_dir: str = "data/external" +) -> Tuple[anndata.AnnData, anndata.AnnData, anndata.AnnData]: + """ + Create and save the larry_neu, larry_mono, and larry_multilineage datasets. + + Args: + output_dir: Directory to save the generated datasets + + Returns: + Tuple containing (larry_neu, larry_mono, larry_multilineage) AnnData objects + """ + import os + from pyrovelocity.io.datasets import larry + + adata_larry = larry() + + adata_neu = extract_clonal_subpopulation( + adata_larry, cell_type="Neutrophil", ratio=1.0, cutoff_timepoints=2 + ) + + adata_mono = extract_clonal_subpopulation( + adata_larry, cell_type="Monocyte", ratio=1.0, cutoff_timepoints=2 + ) + + adata_multi = adata_neu.concatenate(adata_mono) + + os.makedirs(output_dir, exist_ok=True) + adata_neu.write(os.path.join(output_dir, "larry_neu.h5ad")) + adata_mono.write(os.path.join(output_dir, "larry_mono.h5ad")) + adata_multi.write(os.path.join(output_dir, "larry_multilineage.h5ad")) + + return adata_neu, adata_mono, adata_multi From 634278e9a7d7505ee7d8d38722b4616fe384b66b Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Sun, 30 Mar 2025 00:53:43 -0400 Subject: [PATCH 22/32] test(analysis): add subpopulation extraction smoke test Signed-off-by: Cameron Smith --- .../tests/analysis/test_subpopulation.py | 173 ++++++++++++++++++ 1 file changed, 173 insertions(+) create mode 100644 src/pyrovelocity/tests/analysis/test_subpopulation.py diff --git a/src/pyrovelocity/tests/analysis/test_subpopulation.py b/src/pyrovelocity/tests/analysis/test_subpopulation.py new file mode 100644 index 000000000..67f88455a --- /dev/null +++ b/src/pyrovelocity/tests/analysis/test_subpopulation.py @@ -0,0 +1,173 @@ +import numpy as np +import pandas as pd +import pytest +from anndata import AnnData + +from pyrovelocity.analysis.subpopulation import ( + create_larry_subpopulations, + extract_clonal_subpopulation, + select_clones, +) + + +def test_select_clones(adata_larry_multilineage_50_6): + adata = adata_larry_multilineage_50_6 + + n_cells = adata.n_obs + n_clones = 5 + df_clones = np.zeros((n_cells, n_clones)) + + for i in 
range(min(n_clones, n_cells // 10)): + df_clones[i * 10 : (i + 1) * 10, i] = 1 + + cell_types = ["Neutrophil", "Monocyte"] * (n_cells // 2) + if n_cells % 2 == 1: + cell_types.append("Neutrophil") + + timepoints = [] + for i in range(n_cells // 4): + timepoints.extend([1, 2, 3, 4]) + timepoints.extend([1, 2, 3, 4][: n_cells % 4]) + + time_info = [f"d{t}" for t in timepoints] + + state_info = [f"state_{i % 3}" for i in range(n_cells)] + + df_metadata = pd.DataFrame( + { + "cell_type": cell_types, + "timepoint": timepoints, + "time_info": time_info, + "state_info": state_info, + }, + index=adata.obs_names, + ) + + clone_ids, filtered_metadata = select_clones( + df_metadata=df_metadata, + df_clones=df_clones, + ratio=0.7, + cutoff_timepoints=2, + celltypes=["Neutrophil", "Monocyte"], + ) + + assert isinstance(clone_ids, list) + assert isinstance(filtered_metadata, pd.DataFrame) + + if len(clone_ids) == 0: + assert filtered_metadata.empty + else: + assert len(filtered_metadata) > 0 + + +def test_extract_clonal_subpopulation(adata_larry_multilineage_50_6): + adata = adata_larry_multilineage_50_6 + + if "cell_type" not in adata.obs: + if "celltype" in adata.obs: + adata.obs["cell_type"] = adata.obs["celltype"] + else: + adata.obs["cell_type"] = ["Neutrophil", "Monocyte"] * ( + adata.n_obs // 2 + ) + if adata.n_obs % 2 == 1: + adata.obs["cell_type"] = np.append( + adata.obs["cell_type"], ["Neutrophil"] + ) + + if "state_info" not in adata.obs: + adata.obs["state_info"] = [f"state_{i % 3}" for i in range(adata.n_obs)] + + n_cells = adata.n_obs + n_clones = 5 + clone_matrix = np.zeros((n_cells, n_clones)) + + for i in range(min(n_clones, n_cells // 10)): + clone_matrix[i * 10 : (i + 1) * 10, i] = 1 + + adata.obsm["X_clone"] = clone_matrix + + if "timepoint" not in adata.obs: + adata.obs["timepoint"] = [1, 2, 3, 4] * (adata.n_obs // 4) + [ + 1, + 2, + 3, + 4, + ][: adata.n_obs % 4] + + try: + neu_adata = extract_clonal_subpopulation( + adata, cell_type="Neutrophil", ratio=0.7, cutoff_timepoints=1 + ) + + assert isinstance(neu_adata, AnnData) + + if neu_adata.n_obs > 0: + assert all(neu_adata.obs["cell_type"] == "Neutrophil") + except Exception as e: + pytest.skip(f"No Neutrophil clones found in test data: {str(e)}") + + try: + mono_adata = extract_clonal_subpopulation( + adata, cell_type="Monocyte", ratio=0.7, cutoff_timepoints=1 + ) + + assert isinstance(mono_adata, AnnData) + + if mono_adata.n_obs > 0: + assert all(mono_adata.obs["cell_type"] == "Monocyte") + except Exception as e: + pytest.skip(f"No Monocyte clones found in test data: {str(e)}") + + +def test_create_larry_subpopulations(tmp_path, monkeypatch): + from pyrovelocity.io import datasets + + def mock_larry(): + n_obs = 50 + n_vars = 6 + X = np.random.rand(n_obs, n_vars) + + timepoints = [1, 2, 3, 4] * (n_obs // 4) + [1, 2, 3, 4][: n_obs % 4] + + obs = pd.DataFrame( + { + "cell_type": ["Neutrophil", "Monocyte"] * (n_obs // 2), + "timepoint": timepoints, + "time_info": [f"d{t}" for t in timepoints], + "state_info": [f"state_{i % 3}" for i in range(n_obs)], + } + ) + var = pd.DataFrame(index=[f"gene{i}" for i in range(n_vars)]) + + adata = AnnData(X=X, obs=obs, var=var) + + n_clones = 5 + clone_matrix = np.zeros((n_obs, n_clones)) + for i in range(min(n_clones, n_obs // 10)): + clone_matrix[i * 10 : (i + 1) * 10, i] = 1 + adata.obsm["X_clone"] = clone_matrix + + return adata + + monkeypatch.setattr("pyrovelocity.io.datasets.larry", mock_larry) + + output_dir = str(tmp_path) + try: + neu, mono, multi = 
create_larry_subpopulations(output_dir=output_dir) + + assert (tmp_path / "larry_neu.h5ad").exists() + assert (tmp_path / "larry_mono.h5ad").exists() + assert (tmp_path / "larry_multilineage.h5ad").exists() + + assert isinstance(neu, AnnData) + assert isinstance(mono, AnnData) + assert isinstance(multi, AnnData) + + if neu.n_obs > 0: + assert all(neu.obs["cell_type"] == "Neutrophil") + if mono.n_obs > 0: + assert all(mono.obs["cell_type"] == "Monocyte") + + except Exception as e: + pytest.skip(f"Failed to create subpopulations: {str(e)}") From 9b9491c7dc2115040048cf125da20dd52dab1122 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Sun, 30 Mar 2025 00:54:26 -0400 Subject: [PATCH 23/32] fix(fixture): add function to generate larry data fixture Signed-off-by: Cameron Smith --- .../fixtures/generate_anndata_fixtures.py | 73 ++++++++++++++++++- 1 file changed, 72 insertions(+), 1 deletion(-) diff --git a/src/pyrovelocity/tests/fixtures/generate_anndata_fixtures.py b/src/pyrovelocity/tests/fixtures/generate_anndata_fixtures.py index b6870c6e4..1be921afb 100644 --- a/src/pyrovelocity/tests/fixtures/generate_anndata_fixtures.py +++ b/src/pyrovelocity/tests/fixtures/generate_anndata_fixtures.py @@ -11,9 +11,10 @@ import tempfile from pathlib import Path +import numpy as np from anndata import AnnData from beartype import beartype -from beartype.typing import List, Union +from beartype.typing import List from pyrovelocity.io.datasets import pancreas from pyrovelocity.io.serialization import ( @@ -219,6 +220,76 @@ def generate_postprocessed_pancreas_fixture_data( return trained_output_path, postprocessed_output_path +@beartype +def generate_larry_fixture_data( + output_path: str + | Path = "src/pyrovelocity/tests/data/larry_multilineage_50_6.json", + n_obs: int = 50, + genes: List[str] = ["Itgb2", "S100a9", "Fcer1g", "Lilrb4", "Vim", "Serbp1"], +) -> Path: + """ + Generate a test fixture for the Larry multilineage dataset with specific genes. + + Args: + output_path: Path to save the JSON fixture. + n_obs: Number of observations to keep. + genes: List of gene names to include. Defaults to a set of marker genes. + + Returns: + Path to the saved fixture. 
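+
+    Example:
+        A minimal sketch, assuming the ``larry_multilineage`` dataset can be loaded
+        locally; the written fixture is verified by reloading it with
+        ``load_anndata_from_json``::
+
+            fixture_path = generate_larry_fixture_data(
+                output_path="src/pyrovelocity/tests/data/larry_multilineage_50_6.json",
+                n_obs=50,
+            )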
+ """ + output_path = Path(output_path) + + from pyrovelocity.io.datasets import larry_multilineage + + adata: AnnData = larry_multilineage() + + available_genes = [gene for gene in genes if gene in adata.var_names] + if len(available_genes) < len(genes): + missing_genes = set(genes) - set(available_genes) + logger.warning( + f"Some requested genes are not in the dataset: {missing_genes}" + ) + + if not available_genes: + logger.error("None of the requested genes are in the dataset!") + var_indices = np.random.choice( + adata.n_vars, size=min(6, adata.n_vars), replace=False + ) + adata = adata[:, var_indices] + else: + adata = adata[:, available_genes] + + np.random.seed(42) + obs_indices = np.random.choice( + adata.n_obs, size=min(n_obs, adata.n_obs), replace=False + ) + adata = adata[obs_indices, :] + + original_anndata_string = anndata_string(adata) + print_anndata(adata) + + save_anndata_to_json(adata, output_path) + logger.info(f"Larry multilineage test fixture saved to {output_path}") + + try: + logger.info("Attempting to load the serialized Larry AnnData object...") + loaded_adata = load_anndata_from_json(output_path) + loaded_anndata_string = anndata_string(loaded_adata) + logger.info("Successfully loaded the serialized Larry AnnData object.") + print_string_diff( + text1=original_anndata_string, + text2=loaded_anndata_string, + diff_title="Original vs Loaded Larry AnnData", + ) + print_anndata(loaded_adata) + except Exception as e: + logger.error(f"Error loading serialized Larry AnnData object: {str(e)}") + + return output_path + + if __name__ == "__main__": generate_pancreas_fixture_data() generate_postprocessed_pancreas_fixture_data() + generate_larry_fixture_data() From cb4064c7e945039a756b76d82903884212341b67 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Sun, 30 Mar 2025 00:55:02 -0400 Subject: [PATCH 24/32] test(data): add larry multilineage 50x6 fixture Signed-off-by: Cameron Smith --- .../tests/data/larry_multilineage_50_6.json | 1287 +++++++++++++++++ 1 file changed, 1287 insertions(+) create mode 100644 src/pyrovelocity/tests/data/larry_multilineage_50_6.json diff --git a/src/pyrovelocity/tests/data/larry_multilineage_50_6.json b/src/pyrovelocity/tests/data/larry_multilineage_50_6.json new file mode 100644 index 000000000..5be47bb3d --- /dev/null +++ b/src/pyrovelocity/tests/data/larry_multilineage_50_6.json @@ -0,0 +1,1287 @@ +{ + "shape": [50, 6], + "obs": { + "index": [ + "LSK_d6_2_1:TCCAGAAGTTCGTTCC-0-0", + "LSK_d4_2_3:ATGCGGAGCATACATT-0-0", + "d4_2_1:GGACTTCTACACTAAG-0-0", + "d2_1:AAAGCCCGGTAATCCC-0-1", + "d4_1_2:GTAAGCCGTGAGGTCT-0-0", + "LK_d4_1:TAAGGGCCTTTACCCT-0-0", + "LSK_d2_2:GGAGGTTTTACAAACT-0-1", + "LSK_d4_2_1:GTAAGCCGACGGTAGC-0-0", + "d4_2_2:AAGTATTGGCCTCTTT-0-0", + "LSK_d6_2_2:ATATGCAAGGAATAGA-0-0", + "LSK_d2_1:GTGCGAAGATATGCAT-0-0", + "d4_2_2:AACGGTAGCAACGTGG-0-1", + "LK_d6_1_2:CGTTGCCTAAGTAATC-0-1", + "LK_d4_1:AGAAAGTGCGAGTCTG-0-1", + "LK_d6_2_1:CTGTCGCAGGCTACTA-0-0", + "d2_2:GAGTGTACTTCCAGAC-0-0", + "LK_d2:AGTTTGTATTGAGGGT-0-0", + "d4_2_1:GTAAGCCGTTGGCGTT-0-0", + "LSK_d2_1:CTGTTTCCAAATCGTT-0-0", + "LSK_d6_2_2:ATCAATCGTATTGCCT-0-0", + "LSK_d4_2_3:TCCGCTGTCTACTTGT-0-1", + "LSK_d4_1_2:CCATGCATTGCTATTT-0-0", + "LSK_d4_1_1:ACGGTTGGTCACGTTT-0-0", + "d4_1_2:TAGTCTCTAAACAGGG-0-0", + "LSK_d6_2_2:CAGACTCGTTGATCTA-0-0", + "LK_d4_2:GATGCAGAGCAGGGTA-0-1", + "d4_2_1:TGAGAGCGAAATCAGA-0-0", + "LSK_d2_2:ACATGGACATACTCTT-0-0", + "d6_2_3:GGGCATCACAAGTCAT-0-1", + "LK_d4_1:GGTAAATCAGGAGCTT-0-1", + "LSK_d6_1_1:ATCCTAGTGGGCCAAT-0-0", + "LSK_d4_1_1:CCTTTGTCACTTCAAT-0-0", + 
"LSK_d4_1_3:GCGCTGATGGGAGGTA-0-1", + "LSK_d6_2_3:GTGCGAAGCTAGACCC-0-0", + "LSK_d4_2_3:TAGATCAAGTTCAACT-0-1", + "d4_1_1:TCCCTGGATGATGCCC-0-0", + "LSK_d4_1_3:AATAAGGACCATTTGA-0-0", + "LSK_d6_2_2:TCAAATGGTGTTCCAG-0-0", + "LSK_d2_1:ATGGGCTCCCGCAACT-0-0", + "d6_2_1:GCTAAGTTCATCTCCC-0-0", + "LK_d4_1:GGACTTCTGTCATATT-0-1", + "LK_d4_1:CCGTAGCTATGACTTT-0-1", + "d4_1_2:AGTTGAACGGAATAGA-0-1", + "LSK_d6_1_2:AAAGAAAGCTAGATTG-0-1", + "LSK_d2_1:CAAGGGTTGAAGTGCC-0-0", + "LSK_d4_2_2:AAGTCCAAGTCCATGT-0-1", + "LK_d4_2:GTGTCAGGAGAGGTGG-0-1", + "LSK_d6_1_3:ACAATCTTTTGGACTT-0-0", + "LSK_d6_1_1:CATTCATTTGGCCTGT-0-0", + "d4_2_2:GTCTGGAACTTCTGGA-0-1" + ], + "Library": [ + "LSK_d6_2_1", + "LSK_d4_2_3", + "d4_2_1", + "d2_1", + "d4_1_2", + "LK_d4_1", + "LSK_d2_2", + "LSK_d4_2_1", + "d4_2_2", + "LSK_d6_2_2", + "LSK_d2_1", + "d4_2_2", + "LK_d6_1_2", + "LK_d4_1", + "LK_d6_2_1", + "d2_2", + "LK_d2", + "d4_2_1", + "LSK_d2_1", + "LSK_d6_2_2", + "LSK_d4_2_3", + "LSK_d4_1_2", + "LSK_d4_1_1", + "d4_1_2", + "LSK_d6_2_2", + "LK_d4_2", + "d4_2_1", + "LSK_d2_2", + "d6_2_3", + "LK_d4_1", + "LSK_d6_1_1", + "LSK_d4_1_1", + "LSK_d4_1_3", + "LSK_d6_2_3", + "LSK_d4_2_3", + "d4_1_1", + "LSK_d4_1_3", + "LSK_d6_2_2", + "LSK_d2_1", + "d6_2_1", + "LK_d4_1", + "LK_d4_1", + "d4_1_2", + "LSK_d6_1_2", + "LSK_d2_1", + "LSK_d4_2_2", + "LK_d4_2", + "LSK_d6_1_3", + "LSK_d6_1_1", + "d4_2_2" + ], + "Cell barcode": [ + "TCCAGAAG-TTCGTTCC", + "ATGCGGAG-CATACATT", + "GGACTTCT-ACACTAAG", + "AAAGCCCG-GTAATCCC", + "GTAAGCCG-TGAGGTCT", + "TAAGGGCC-TTTACCCT", + "GGAGGTTT-TACAAACT", + "GTAAGCCG-ACGGTAGC", + "AAGTATTG-GCCTCTTT", + "ATATGCAA-GGAATAGA", + "GTGCGAAG-ATATGCAT", + "AACGGTAG-CAACGTGG", + "CGTTGCCT-AAGTAATC", + "AGAAAGTG-CGAGTCTG", + "CTGTCGCA-GGCTACTA", + "GAGTGTAC-TTCCAGAC", + "AGTTTGTA-TTGAGGGT", + "GTAAGCCG-TTGGCGTT", + "CTGTTTCC-AAATCGTT", + "ATCAATCG-TATTGCCT", + "TCCGCTGT-CTACTTGT", + "CCATGCAT-TGCTATTT", + "ACGGTTGG-TCACGTTT", + "TAGTCTCT-AAACAGGG", + "CAGACTCG-TTGATCTA", + "GATGCAGA-GCAGGGTA", + "TGAGAGCG-AAATCAGA", + "ACATGGAC-ATACTCTT", + "GGGCATCA-CAAGTCAT", + "GGTAAATC-AGGAGCTT", + "ATCCTAGT-GGGCCAAT", + "CCTTTGTC-ACTTCAAT", + "GCGCTGAT-GGGAGGTA", + "GTGCGAAG-CTAGACCC", + "TAGATCAA-GTTCAACT", + "TCCCTGGA-TGATGCCC", + "AATAAGGA-CCATTTGA", + "TCAAATGG-TGTTCCAG", + "ATGGGCTC-CCGCAACT", + "GCTAAGTT-CATCTCCC", + "GGACTTCT-GTCATATT", + "CCGTAGCT-ATGACTTT", + "AGTTGAAC-GGAATAGA", + "AAAGAAAG-CTAGATTG", + "CAAGGGTT-GAAGTGCC", + "AAGTCCAA-GTCCATGT", + "GTGTCAGG-AGAGGTGG", + "ACAATCTT-TTGGACTT", + "CATTCATT-TGGCCTGT", + "GTCTGGAA-CTTCTGGA" + ], + "time_info": [ + 6.0, 4.0, 4.0, 2.0, 4.0, 4.0, 2.0, 4.0, 4.0, 6.0, 2.0, 4.0, 6.0, 4.0, 6.0, 2.0, 2.0, 4.0, 2.0, + 6.0, 4.0, 4.0, 4.0, 4.0, 6.0, 4.0, 4.0, 2.0, 6.0, 4.0, 6.0, 4.0, 4.0, 6.0, 4.0, 4.0, 4.0, 6.0, + 2.0, 6.0, 4.0, 4.0, 4.0, 6.0, 2.0, 4.0, 4.0, 6.0, 6.0, 4.0 + ], + "Starting population": [ + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1+", + 
"Lin-Kit+Sca1+", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1-", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1+", + "Lin-Kit+Sca1-" + ], + "state_info": [ + "Monocyte", + "Undifferentiated", + "Monocyte", + "Undifferentiated", + "Monocyte", + "Monocyte", + "Undifferentiated", + "Undifferentiated", + "Undifferentiated", + "Monocyte", + "Undifferentiated", + "Neutrophil", + "Neutrophil", + "Neutrophil", + "Monocyte", + "Undifferentiated", + "Undifferentiated", + "Neutrophil", + "Undifferentiated", + "Monocyte", + "Undifferentiated", + "Monocyte", + "Undifferentiated", + "Monocyte", + "Monocyte", + "Neutrophil", + "Monocyte", + "Undifferentiated", + "Neutrophil", + "Neutrophil", + "Monocyte", + "Undifferentiated", + "Undifferentiated", + "Monocyte", + "Undifferentiated", + "Monocyte", + "Undifferentiated", + "Monocyte", + "Undifferentiated", + "Monocyte", + "Neutrophil", + "Neutrophil", + "Neutrophil", + "Neutrophil", + "Undifferentiated", + "Undifferentiated", + "Neutrophil", + "Monocyte", + "Monocyte", + "Neutrophil" + ], + "Well": [ + 2.0, 2.0, 2.0, 0.0, 1.0, 1.0, 0.0, 2.0, 2.0, 2.0, 0.0, 2.0, 1.0, 1.0, 2.0, 0.0, 0.0, 2.0, 0.0, + 2.0, 2.0, 1.0, 1.0, 1.0, 2.0, 2.0, 2.0, 0.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 2.0, + 0.0, 2.0, 1.0, 1.0, 1.0, 1.0, 0.0, 2.0, 2.0, 1.0, 1.0, 2.0 + ], + "SPRING-x": [ + 2700.854, 831.428, 2788.378, 388.715, 1422.334, 2909.622, 212.713, 1105.662, 581.941, + 2617.362, 393.305, 1745.886, 2504.045, 1479.389, 2938.281, 475.915, 585.414, 1749.528, + 255.098, 2148.064, 45.343, 2662.043, 1089.587, 2094.441, 2761.554, 1139.285, 2347.685, 38.661, + 2394.754, 862.068, 2479.112, 609.208, -108.622, 2913.969, 332.064, 2418.201, 125.499, + 2368.444, 297.961, 3012.502, 1041.288, 1045.92, 658.897, 639.602, 157.148, 337.053, 952.065, + 2769.392, 2352.814, 806.93 + ], + "SPRING-y": [ + 126.018, 166.762, -10.842, -306.311, -356.141, 247.128, 246.484, -204.106, -199.172, -77.62, + 745.536, -2142.738, -2006.018, -2223.724, 154.022, 486.693, 49.715, -2057.174, 828.944, + -157.97, 208.785, 340.057, -159.293, -289.377, 164.0, -2105.04, -383.32, 882.35, -2040.942, + -1758.182, 276.97, 214.233, -64.381, 278.925, -443.132, -193.811, -70.481, -236.294, 814.93, + 566.397, -1972.23, -2034.936, -1186.781, -1315.255, 393.916, -74.72, -1927.948, 189.098, + -467.925, -1756.369 + ], + "gcs": [ + 0.5805438120508694, 0.6997531835702828, 0.5847023611383371, 0.7297043028189776, + 0.7081150236805004, 0.5857218831916864, 0.7071824723361388, 0.6705278032755103, + 0.7387491225172702, 0.603662542417947, 0.7049511691584287, 0.5976888950845951, + 0.5684758584588009, 0.6007088009040336, 0.5852234089103625, 0.7453336564020443, + 0.746665886190189, 0.58584768639876, 0.7209385126438916, 0.6430221863321457, + 0.7080556360321362, 0.5800243273198451, 0.7097434492139695, 0.6284909092443718, + 0.5814721492469962, 0.6394623763244174, 0.6331157432566309, 0.70708146380513, + 0.5671632777815558, 0.6884261116910538, 0.5818869704798988, 0.7181726639479538, + 0.688464886638855, 0.5732843737494537, 0.7004915898121783, 0.6186360249158435, + 0.6997683077312096, 0.5988425350035116, 0.6897620216961021, 0.5926806669326682, + 0.6400445925701025, 0.6386909053232143, 0.7156547257711631, 0.6898675270798283, + 0.7383604233863538, 0.7228467916086586, 0.6603185345384909, 0.5900548871709503, + 0.5947142634869628, 0.6565264562695005 + ], + "cytotrace": [ + 0.13246927102348788, 0.662610036104012, 0.16098738387895015, 
0.8640420266926291, + 0.725548659283599, 0.16766054115451706, 0.7191797493002312, 0.45598555839519694, + 0.9107744107744108, 0.25577055697537626, 0.702020202020202, 0.2327086122266845, + 0.054277716928319335, 0.2450204859843414, 0.1643746703987668, 0.9408746095493083, + 0.9461887955863859, 0.16851243357267454, 0.8107176179465336, 0.35783538193176745, + 0.7249604478520141, 0.12879802036428542, 0.736826092247779, 0.32262382864792505, + 0.13928441036874772, 0.34917447568049975, 0.333678146931159, 0.7185104052573932, + 0.0464078536367693, 0.5742971887550201, 0.14208348545697944, 0.7932132570686787, + 0.574662285505659, 0.08654821305423716, 0.6688166808648737, 0.2992576366070342, + 0.6627520181737049, 0.237495436290617, 0.5852906575798142, 0.20938298649142023, + 0.3505740132246156, 0.347186726704799, 0.7765202223033548, 0.5862845320676646, + 0.9087460954930835, 0.8225629791894852, 0.41004421727313295, 0.19552959311995458, + 0.21952456289805689, 0.3961502575960407 + ], + "counts": [ + 0.5805438120508694, 0.6997531835702828, 0.5847023611383371, 0.7297043028189776, + 0.7081150236805004, 0.5857218831916864, 0.7071824723361388, 0.6705278032755103, + 0.7387491225172702, 0.603662542417947, 0.7049511691584287, 0.5976888950845951, + 0.5684758584588009, 0.6007088009040336, 0.5852234089103625, 0.7453336564020443, + 0.746665886190189, 0.58584768639876, 0.7209385126438916, 0.6430221863321457, + 0.7080556360321362, 0.5800243273198451, 0.7097434492139695, 0.6284909092443718, + 0.5814721492469962, 0.6394623763244174, 0.6331157432566309, 0.70708146380513, + 0.5671632777815558, 0.6884261116910538, 0.5818869704798988, 0.7181726639479538, + 0.688464886638855, 0.5732843737494537, 0.7004915898121783, 0.6186360249158435, + 0.6997683077312096, 0.5988425350035116, 0.6897620216961021, 0.5926806669326682, + 0.6400445925701025, 0.6386909053232143, 0.7156547257711631, 0.6898675270798283, + 0.7383604233863538, 0.7228467916086586, 0.6603185345384909, 0.5900548871709503, + 0.5947142634869628, 0.6565264562695005 + ], + "clones": [ + 13, 59, 5, 130, 12, 44, 40, 13, 83, 57, 89, 132, 93, 50, 52, 117, 56, 93, 111, 123, 99, 80, + 49, 60, 89, 1, 4, 92, 92, 124, 95, 20, 14, 17, 44, 32, 84, 37, 48, 66, 75, 118, 128, 21, 17, + 85, 124, 111, 58, 3 + ], + "clonei": [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + "time": [ + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN, + NaN + ], + "batch": [ + "0", + "0", + "0", + "1", + "0", + "0", + "1", + "0", + "0", + "0", + "0", + "1", + "1", + "1", + "0", + "0", + "0", + "0", + "0", + "0", + "1", + "0", + "0", + "0", + "0", + "1", + "0", + "0", + "1", + "1", + "0", + "0", + "1", + "0", + "1", + "0", + "0", + "0", + "0", + "0", + "1", + "1", + "1", + "1", + "0", + "1", + "1", + "0", + "0", + "1" + ] + }, + "var": { + "index": ["Itgb2", "S100a9", "Fcer1g", "Lilrb4", "Vim", "Serbp1"], + "Accession": ["Itgb2", "S100a9", "Fcer1g", "Lilrb4", "Vim", "Serbp1"], + "Chromosome": ["10", "3", "1", "10", "2", "6"], + "End": [77565674, 90695691, 171234349, 51496611, 13582826, 67289302], + "Start": [77530348, 
90692633, 171229572, 51490975, 13574311, 67266979], + "Strand": ["+", "-", "-", "+", "+", "+"], + "cytotrace": ["True", "True", "True", "True", "True", "True"], + "cytotrace_corrs": [ + "0.12845377042531272", + "0.006227611900008308", + "0.1670159799077079", + "-0.0268853787938926", + "0.2016285154446795", + "0.6623317488268362" + ] + }, + "X": [ + [4.0, 0.0, 9.0, 10.0, 21.0, 0.0], + [1.0, 0.0, 2.0, 0.0, 4.0, 3.0], + [11.0, 3.0, 11.0, 19.0, 31.0, 1.0], + [0.0, 0.0, 3.0, 0.0, 3.0, 6.0], + [5.0, 2.0, 1.0, 6.0, 9.0, 7.0], + [9.0, 0.0, 7.0, 7.0, 14.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 6.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 1.0], + [0.0, 5.0, 6.0, 0.0, 3.0, 5.0], + [2.0, 0.0, 6.0, 6.0, 8.0, 1.0], + [0.0, 0.0, 1.0, 1.0, 2.0, 4.0], + [6.0, 222.0, 7.0, 0.0, 4.0, 0.0], + [5.0, 218.0, 13.0, 4.0, 3.0, 0.0], + [7.0, 154.0, 3.0, 2.0, 1.0, 0.0], + [14.0, 0.0, 8.0, 2.0, 7.0, 0.0], + [1.0, 0.0, 1.0, 1.0, 1.0, 5.0], + [0.0, 0.0, 5.0, 0.0, 1.0, 7.0], + [6.0, 137.0, 4.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 4.0, 4.0], + [3.0, 0.0, 3.0, 3.0, 17.0, 2.0], + [0.0, 0.0, 0.0, 0.0, 2.0, 4.0], + [12.0, 0.0, 11.0, 3.0, 12.0, 2.0], + [3.0, 0.0, 3.0, 1.0, 2.0, 6.0], + [12.0, 3.0, 16.0, 6.0, 19.0, 1.0], + [1.0, 0.0, 5.0, 1.0, 16.0, 0.0], + [6.0, 207.0, 10.0, 0.0, 3.0, 2.0], + [5.0, 7.0, 4.0, 5.0, 8.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 2.0, 6.0], + [6.0, 102.0, 4.0, 2.0, 5.0, 1.0], + [9.0, 267.0, 6.0, 1.0, 9.0, 0.0], + [1.0, 0.0, 6.0, 7.0, 9.0, 3.0], + [2.0, 0.0, 2.0, 1.0, 2.0, 9.0], + [0.0, 1.0, 0.0, 0.0, 0.0, 1.0], + [9.0, 0.0, 8.0, 12.0, 20.0, 0.0], + [0.0, 0.0, 3.0, 0.0, 3.0, 1.0], + [5.0, 0.0, 4.0, 9.0, 15.0, 0.0], + [0.0, 0.0, 3.0, 0.0, 2.0, 3.0], + [5.0, 0.0, 2.0, 4.0, 13.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0, 2.0], + [22.0, 1.0, 16.0, 16.0, 39.0, 2.0], + [3.0, 175.0, 6.0, 0.0, 1.0, 0.0], + [5.0, 143.0, 11.0, 6.0, 4.0, 4.0], + [2.0, 26.0, 6.0, 2.0, 2.0, 6.0], + [3.0, 20.0, 2.0, 0.0, 1.0, 1.0], + [0.0, 0.0, 4.0, 1.0, 1.0, 10.0], + [0.0, 0.0, 1.0, 0.0, 0.0, 1.0], + [4.0, 265.0, 5.0, 1.0, 1.0, 0.0], + [7.0, 0.0, 5.0, 10.0, 18.0, 2.0], + [9.0, 0.0, 10.0, 4.0, 22.0, 1.0], + [6.0, 25.0, 2.0, 0.0, 1.0, 0.0] + ], + "layers": { + "ambiguous": [ + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 
0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0] + ], + "matrix": [ + [4.0, 0.0, 9.0, 10.0, 21.0, 0.0], + [1.0, 0.0, 2.0, 0.0, 4.0, 3.0], + [11.0, 3.0, 11.0, 19.0, 31.0, 1.0], + [0.0, 0.0, 3.0, 0.0, 3.0, 6.0], + [5.0, 2.0, 1.0, 6.0, 9.0, 7.0], + [9.0, 0.0, 7.0, 7.0, 14.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 6.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 1.0], + [0.0, 5.0, 6.0, 0.0, 3.0, 5.0], + [2.0, 0.0, 6.0, 6.0, 8.0, 1.0], + [0.0, 0.0, 1.0, 1.0, 2.0, 4.0], + [6.0, 222.0, 7.0, 0.0, 4.0, 0.0], + [5.0, 218.0, 13.0, 4.0, 3.0, 0.0], + [7.0, 154.0, 3.0, 2.0, 1.0, 0.0], + [14.0, 0.0, 8.0, 2.0, 7.0, 0.0], + [1.0, 0.0, 1.0, 1.0, 1.0, 5.0], + [0.0, 0.0, 5.0, 0.0, 1.0, 7.0], + [6.0, 138.0, 4.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 4.0, 4.0], + [3.0, 0.0, 3.0, 3.0, 17.0, 2.0], + [0.0, 0.0, 0.0, 0.0, 2.0, 4.0], + [12.0, 0.0, 11.0, 3.0, 12.0, 2.0], + [3.0, 0.0, 3.0, 1.0, 2.0, 6.0], + [12.0, 3.0, 16.0, 6.0, 19.0, 1.0], + [1.0, 0.0, 5.0, 1.0, 16.0, 0.0], + [6.0, 207.0, 10.0, 0.0, 3.0, 2.0], + [5.0, 7.0, 4.0, 5.0, 8.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 2.0, 6.0], + [6.0, 102.0, 4.0, 2.0, 5.0, 1.0], + [9.0, 271.0, 6.0, 1.0, 9.0, 0.0], + [1.0, 0.0, 6.0, 7.0, 9.0, 3.0], + [2.0, 0.0, 2.0, 1.0, 2.0, 9.0], + [0.0, 1.0, 0.0, 0.0, 0.0, 1.0], + [9.0, 0.0, 8.0, 12.0, 20.0, 0.0], + [0.0, 0.0, 3.0, 0.0, 3.0, 1.0], + [5.0, 0.0, 4.0, 9.0, 15.0, 0.0], + [0.0, 0.0, 3.0, 0.0, 2.0, 3.0], + [5.0, 0.0, 2.0, 4.0, 13.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0, 2.0], + [22.0, 1.0, 16.0, 16.0, 39.0, 2.0], + [3.0, 176.0, 6.0, 0.0, 1.0, 0.0], + [5.0, 143.0, 11.0, 6.0, 4.0, 4.0], + [2.0, 26.0, 6.0, 2.0, 2.0, 6.0], + [3.0, 20.0, 2.0, 0.0, 1.0, 1.0], + [0.0, 0.0, 5.0, 1.0, 1.0, 10.0], + [0.0, 0.0, 1.0, 0.0, 0.0, 1.0], + [4.0, 265.0, 5.0, 1.0, 1.0, 0.0], + [7.0, 0.0, 5.0, 10.0, 18.0, 2.0], + [9.0, 0.0, 10.0, 4.0, 22.0, 1.0], + [6.0, 26.0, 2.0, 0.0, 1.0, 0.0] + ], + "spliced": [ + [4.0, 0.0, 9.0, 10.0, 21.0, 0.0], + [1.0, 0.0, 2.0, 0.0, 4.0, 3.0], + [11.0, 3.0, 11.0, 19.0, 31.0, 1.0], + [0.0, 0.0, 3.0, 0.0, 3.0, 6.0], + [5.0, 2.0, 1.0, 6.0, 9.0, 7.0], + [9.0, 0.0, 7.0, 7.0, 14.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 6.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 1.0], + [0.0, 5.0, 6.0, 0.0, 3.0, 5.0], + [2.0, 0.0, 6.0, 6.0, 8.0, 1.0], + [0.0, 0.0, 1.0, 1.0, 2.0, 4.0], + [6.0, 222.0, 7.0, 0.0, 4.0, 0.0], + [5.0, 218.0, 13.0, 4.0, 3.0, 0.0], + [7.0, 154.0, 3.0, 2.0, 1.0, 0.0], + [14.0, 0.0, 8.0, 2.0, 7.0, 0.0], + [1.0, 0.0, 1.0, 1.0, 1.0, 5.0], + [0.0, 0.0, 5.0, 0.0, 1.0, 7.0], + [6.0, 137.0, 4.0, 0.0, 1.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 4.0, 4.0], + [3.0, 0.0, 3.0, 3.0, 17.0, 2.0], + [0.0, 0.0, 0.0, 0.0, 2.0, 4.0], + [12.0, 0.0, 11.0, 3.0, 12.0, 2.0], + [3.0, 0.0, 3.0, 1.0, 2.0, 6.0], + [12.0, 3.0, 16.0, 6.0, 19.0, 1.0], + [1.0, 0.0, 5.0, 1.0, 16.0, 0.0], + [6.0, 207.0, 10.0, 0.0, 3.0, 2.0], + [5.0, 7.0, 4.0, 5.0, 8.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 2.0, 6.0], + [6.0, 102.0, 4.0, 2.0, 5.0, 1.0], + [9.0, 267.0, 6.0, 1.0, 9.0, 0.0], + [1.0, 0.0, 6.0, 7.0, 9.0, 3.0], + [2.0, 0.0, 2.0, 1.0, 2.0, 9.0], + [0.0, 1.0, 0.0, 0.0, 0.0, 1.0], + [9.0, 0.0, 8.0, 12.0, 20.0, 0.0], + [0.0, 0.0, 3.0, 0.0, 3.0, 1.0], + [5.0, 0.0, 4.0, 9.0, 15.0, 0.0], + [0.0, 0.0, 3.0, 0.0, 2.0, 3.0], + [5.0, 0.0, 2.0, 4.0, 13.0, 0.0], + [0.0, 0.0, 0.0, 1.0, 0.0, 2.0], + [22.0, 1.0, 16.0, 16.0, 39.0, 2.0], + [3.0, 175.0, 6.0, 0.0, 1.0, 0.0], + [5.0, 143.0, 11.0, 6.0, 4.0, 4.0], + 
[2.0, 26.0, 6.0, 2.0, 2.0, 6.0], + [3.0, 20.0, 2.0, 0.0, 1.0, 1.0], + [0.0, 0.0, 4.0, 1.0, 1.0, 10.0], + [0.0, 0.0, 1.0, 0.0, 0.0, 1.0], + [4.0, 265.0, 5.0, 1.0, 1.0, 0.0], + [7.0, 0.0, 5.0, 10.0, 18.0, 2.0], + [9.0, 0.0, 10.0, 4.0, 22.0, 1.0], + [6.0, 25.0, 2.0, 0.0, 1.0, 0.0] + ], + "unspliced": [ + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 4.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 1.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 1.0, 0.0, 0.0, 0.0, 0.0] + ] + }, + "obsm": { + "X_clone": [ + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 
+ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ], + [ + 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, + 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 + ] + ], + "X_emb": [ + [2700.854, 126.018], + [831.428, 166.762], + [2788.378, -10.842], + [388.715, -306.311], + [1422.334, -356.141], + [2909.622, 247.128], + [212.713, 246.484], + [1105.662, -204.106], + [581.941, -199.172], + [2617.362, -77.62], + [393.305, 745.536], + [1745.886, -2142.738], + [2504.045, -2006.018], + [1479.389, -2223.724], + [2938.281, 154.022], + [475.915, 486.693], + [585.414, 49.715], + [1749.528, -2057.174], + [255.098, 828.944], + [2148.064, -157.97], + [45.343, 208.785], + [2662.043, 340.057], + [1089.587, -159.293], + [2094.441, -289.377], + [2761.554, 164.0], + [1139.285, -2105.04], + [2347.685, -383.32], + [38.661, 882.35], + [2394.754, -2040.942], + [862.068, -1758.182], + [2479.112, 276.97], + [609.208, 214.233], + [-108.622, -64.381], + [2913.969, 278.925], + [332.064, -443.132], + [2418.201, -193.811], + [125.499, -70.481], + [2368.444, -236.294], + [297.961, 814.93], + [3012.502, 566.397], + [1041.288, -1972.23], + [1045.92, -2034.936], + [658.897, -1186.781], + [639.602, -1315.255], + [157.148, 393.916], + [337.053, -74.72], + [952.065, -1927.948], + [2769.392, 189.098], + [2352.814, -467.925], + [806.93, -1756.369] + ], + "clone_vector_emb": [ + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, 
NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN], + [NaN, NaN] + ] + } +} From 0b0388ac736f6ca15a179127cef17a15aef0f459 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Sun, 30 Mar 2025 00:55:24 -0400 Subject: [PATCH 25/32] test(conftest): add adata_larry_multilineage_50_6 fixture Signed-off-by: Cameron Smith --- src/pyrovelocity/tests/conftest.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/pyrovelocity/tests/conftest.py b/src/pyrovelocity/tests/conftest.py index 1be92ff4d..ef2963608 100644 --- a/src/pyrovelocity/tests/conftest.py +++ b/src/pyrovelocity/tests/conftest.py @@ -41,6 +41,14 @@ def adata_postprocessed_pancreas_50_7(): return load_anndata_from_json(fixture_file_path) +@pytest.fixture +def adata_larry_multilineage_50_6(): + fixture_file_path = ( + files("pyrovelocity.tests.data") / "larry_multilineage_50_6.json" + ) + return load_anndata_from_json(fixture_file_path) + + @pytest.fixture def pancreas_model2_path(): return files("pyrovelocity.tests.data") / "models" / "pancreas_model2" From 43a9aef623a9181733898bfb547fd5813723a6fa Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Sun, 30 Mar 2025 00:55:49 -0400 Subject: [PATCH 26/32] test(trajectory): add smoke tests for trajectory module Signed-off-by: Cameron Smith --- .../tests/analysis/test_trajectory.py | 95 +++++++++++++++++++ 1 file changed, 95 insertions(+) create mode 100644 src/pyrovelocity/tests/analysis/test_trajectory.py diff --git a/src/pyrovelocity/tests/analysis/test_trajectory.py b/src/pyrovelocity/tests/analysis/test_trajectory.py new file mode 100644 index 000000000..45c3ae80b --- /dev/null +++ b/src/pyrovelocity/tests/analysis/test_trajectory.py @@ -0,0 +1,95 @@ +import numpy as np +from anndata import AnnData + +from pyrovelocity.analysis.trajectory import ( + align_trajectory_diff, + get_clone_trajectory, +) + + +def test_get_clone_trajectory_all_clones(adata_larry_multilineage_50_6): + """Test get_clone_trajectory with all clones (clone_num=None).""" + adata = adata_larry_multilineage_50_6.copy() + + if "X_clone" not in adata.obsm: + n_cells = adata.n_obs + n_clones = 5 + clone_matrix = np.zeros((n_cells, n_clones)) + for i in range(min(n_clones, n_cells // 10)): + clone_matrix[i * 10 : (i + 1) * 10, i] = 1 + adata.obsm["X_clone"] = clone_matrix + + if "timepoint" not in adata.obs: + adata.obs["timepoint"] = [2, 4, 6] * (adata.n_obs // 3) + [2, 4, 6][ + : adata.n_obs % 3 + ] + + result = get_clone_trajectory(adata=adata, clone_num=None) + + assert isinstance(result, AnnData) + assert "clone_vector_emb" in result.obsm + + if "clone_id" in result.obs: + unique_clones = result.obs["clone_id"].nunique() + expected_clones = min(5, adata.n_obs // 10) # Based on our setup + assert unique_clones <= expected_clones + + +def test_align_trajectory_diff(adata_larry_multilineage_50_6): + """Smoke test for align_trajectory_diff function. + + This test verifies that align_trajectory_diff runs without errors and returns + the expected type of output, using the same pattern as observed in + plot_lineage_fate_correlation. 
+ """ + adata = adata_larry_multilineage_50_6.copy() + + if "X_clone" not in adata.obsm: + n_cells = adata.n_obs + n_clones = 5 + clone_matrix = np.zeros((n_cells, n_clones)) + for i in range(min(n_clones, n_cells // 10)): + clone_matrix[i * 10 : (i + 1) * 10, i] = 1 + adata.obsm["X_clone"] = clone_matrix + + if "timepoint" not in adata.obs: + adata.obs["timepoint"] = [2, 4, 6] * (adata.n_obs // 3) + [2, 4, 6][ + : adata.n_obs % 3 + ] + + if "time_info" not in adata.obs: + adata.obs["time_info"] = adata.obs.get("timepoint", 0) + + if "X_emb" not in adata.obsm: + adata.obsm["X_emb"] = np.random.normal(size=(adata.n_obs, 2)) + + if "velocity_emb" not in adata.obsm: + adata.obsm["velocity_emb"] = np.random.normal(size=(adata.n_obs, 2)) + + adata_clone = get_clone_trajectory(adata, clone_num=None) + + adata2 = adata.copy() + + density = 0.35 + result = align_trajectory_diff( + [adata_clone, adata, adata2], + [ + adata_clone.obsm.get( + "clone_vector_emb", np.zeros((adata_clone.n_obs, 2)) + ), + adata.obsm.get("velocity_emb", np.zeros((adata.n_obs, 2))), + adata2.obsm.get("velocity_emb", np.zeros((adata2.n_obs, 2))), + ], + embed="emb", + density=density, + ) + + assert isinstance(result, np.ndarray) + # Result should be a 2D array with each row containing: + # [x, y, vx1, vy1, vx2, vy2, vx3, vy3] where: + # - (x, y) is the grid point + # - (vx1, vy1) is the vector from adata_clone + # - (vx2, vy2) is the vector from adata + # - (vx3, vy3) is the vector from adata2 + assert result.ndim == 2 + assert result.shape[1] == 8 # Grid points (2) + 3 vector fields (6) From ce26b549e5677717b50ea6130351f200c02719ff Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Sun, 30 Mar 2025 00:56:59 -0400 Subject: [PATCH 27/32] fix(tasks): use vendored data for multilineage clone trajectory Signed-off-by: Cameron Smith --- .../tasks/time_fate_correlation.py | 51 +++++++------------ 1 file changed, 18 insertions(+), 33 deletions(-) diff --git a/src/pyrovelocity/tasks/time_fate_correlation.py b/src/pyrovelocity/tasks/time_fate_correlation.py index 2937187d1..83c8782c4 100644 --- a/src/pyrovelocity/tasks/time_fate_correlation.py +++ b/src/pyrovelocity/tasks/time_fate_correlation.py @@ -177,41 +177,26 @@ def create_time_lineage_fate_correlation_plot( if dataset_name == "larry_multilineage": logger.info( - "Creating multilineage clone trajectory from mono and neu subsets" + "Creating multilineage clone trajectory from mono and neu datasets" ) - if "state_info" in adata_pyrovelocity.obs: - mono_mask = adata_pyrovelocity.obs["state_info"].str.contains( - "Mono", case=False, na=False - ) - neu_mask = adata_pyrovelocity.obs["state_info"].str.contains( - "Neu", case=False, na=False - ) - - mono_adata = adata_pyrovelocity[mono_mask].copy() - neu_adata = adata_pyrovelocity[neu_mask].copy() - - logger.info( - f" - Generating mono trajectory with {mono_adata.n_obs} cells" - ) - mono_clone = get_clone_trajectory(mono_adata) - - logger.info( - f" - Generating neu trajectory with {neu_adata.n_obs} cells" - ) - neu_clone = get_clone_trajectory(neu_adata) - - logger.info(" - Concatenating mono and neu trajectories") - clone_trajectories[dataset_name] = mono_clone.concatenate( - neu_clone - ) - else: - logger.warning( - "Could not identify mono/neu cells in multilineage dataset. Generating unified trajectory." 
- ) - clone_trajectories[dataset_name] = get_clone_trajectory( - adata_pyrovelocity - ) + from pyrovelocity.io.datasets import larry_mono, larry_neu + + mono_adata = larry_mono() + neu_adata = larry_neu() + + logger.info( + f" - Generating mono trajectory with {mono_adata.n_obs} cells" + ) + mono_clone = get_clone_trajectory(mono_adata) + + logger.info( + f" - Generating neu trajectory with {neu_adata.n_obs} cells" + ) + neu_clone = get_clone_trajectory(neu_adata) + + logger.info(" - Concatenating mono and neu trajectories") + clone_trajectories[dataset_name] = mono_clone.concatenate(neu_clone) else: logger.info( f"Generating clone trajectory for {dataset_name} with {adata_pyrovelocity.n_obs} cells" From cc3b3d8d6e825b902f412c786e9145bddfe319bf Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Sun, 30 Mar 2025 13:40:13 -0400 Subject: [PATCH 28/32] fix(subpopulation): update types Signed-off-by: Cameron Smith --- src/pyrovelocity/analysis/subpopulation.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/src/pyrovelocity/analysis/subpopulation.py b/src/pyrovelocity/analysis/subpopulation.py index ba11de953..64c6932db 100644 --- a/src/pyrovelocity/analysis/subpopulation.py +++ b/src/pyrovelocity/analysis/subpopulation.py @@ -4,10 +4,10 @@ import numpy as np import pandas as pd -import anndata -from scipy.sparse import issparse -from typing import List, Optional, Tuple +from anndata import AnnData from beartype import beartype +from beartype.typing import List, Optional, Tuple +from scipy.sparse import issparse from pyrovelocity.logging import configure_logging @@ -78,12 +78,12 @@ def select_clones( @beartype def extract_clonal_subpopulation( - adata: anndata.AnnData, + adata: AnnData, cell_type: Optional[str] = None, cell_types: Optional[List[str]] = None, ratio: float = 1.0, cutoff_timepoints: int = 2, -) -> anndata.AnnData: +) -> AnnData: """ Extract cells belonging to clones that differentiate into specified cell type(s). @@ -128,13 +128,18 @@ def extract_clonal_subpopulation( f"Extracted {adata_filtered.n_obs} cells from {len(clones_selected)} clones" ) + if cell_types: + logger.info( + f"Extracting cells from clones differentiated into: {', '.join(cell_types)}" + ) + return adata_filtered @beartype def create_larry_subpopulations( output_dir: str = "data/external" -) -> Tuple[anndata.AnnData, anndata.AnnData, anndata.AnnData]: +) -> Tuple[AnnData, AnnData, AnnData]: """ Create and save the larry_neu, larry_mono, and larry_multilineage datasets. 
@@ -145,6 +150,7 @@ def create_larry_subpopulations( Tuple containing (larry_neu, larry_mono, larry_multilineage) AnnData objects """ import os + from pyrovelocity.io.datasets import larry adata_larry = larry() From 77e24870d1f14d7e98e18bc29d533edba9df1eb0 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Sun, 30 Mar 2025 15:46:39 -0400 Subject: [PATCH 29/32] fix(plots): prefer hexbin in lineage fate correlation plot Signed-off-by: Cameron Smith --- .../plots/_lineage_fate_correlation.py | 89 +++++++++++++------ 1 file changed, 64 insertions(+), 25 deletions(-) diff --git a/src/pyrovelocity/plots/_lineage_fate_correlation.py b/src/pyrovelocity/plots/_lineage_fate_correlation.py index 3c13c0bb5..cb105c617 100644 --- a/src/pyrovelocity/plots/_lineage_fate_correlation.py +++ b/src/pyrovelocity/plots/_lineage_fate_correlation.py @@ -16,6 +16,7 @@ from pyrovelocity.analysis.trajectory import align_trajectory_diff from pyrovelocity.io.compressedpickle import CompressedPickle from pyrovelocity.logging import configure_logging +from pyrovelocity.plots._common import set_colorbar from pyrovelocity.plots._time import plot_posterior_time from pyrovelocity.plots._uncertainty import ( get_posterior_sample_angle_uncertainty, @@ -48,6 +49,7 @@ def plot_lineage_fate_correlation( default_fontsize: int = 7, default_title_padding: int = 2, include_uncertainty_measures: bool = False, + plot_individual_obs: bool = False, ) -> List[Axes] | np.ndarray: """ Plot lineage fate correlation with shared latent time estimates. @@ -73,6 +75,8 @@ def plot_lineage_fate_correlation( default_title_padding (int, optional): Default title padding. Defaults to 2. include_uncertainty_measures (bool, optional): Whether to include uncertainty measures. Defaults to False. + plot_individual_obs (bool, optional): Whether to plot individual observations + instead of using hexbin. Defaults to False. Returns: List[Axes] | np.ndarray: The axes objects. 
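To make the new behavior concrete, the following is a small, self-contained sketch of the hexbin summarization that the default (non-`plot_individual_obs`) branch relies on: per-cell scalar values such as `fate_potency_transition_map` or `latent_time` are averaged over a hexagonal grid spanning the embedding via `reduce_C_function=np.mean`, instead of drawing one marker per observation. The embedding coordinates and values below are synthetic placeholders; only the matplotlib call pattern mirrors the patch.

import matplotlib.pyplot as plt
import numpy as np

# Synthetic stand-ins for an embedding and a per-cell scalar (e.g. latent time).
rng = np.random.default_rng(0)
emb = rng.normal(size=(5000, 2))
values = rng.uniform(size=5000)

fig, ax = plt.subplots(figsize=(3, 3))
# Each hexagon shows the mean of `values` over the cells falling inside it,
# mirroring the hexbin branch added to plot_lineage_fate_correlation.
im = ax.hexbin(
    x=emb[:, 0],
    y=emb[:, 1],
    C=values,
    gridsize=100,
    cmap="inferno",
    linewidths=0,
    edgecolors="none",
    reduce_C_function=np.mean,
)
fig.colorbar(im, ax=ax)
ax.axis("off")
fig.savefig("hexbin_sketch.pdf")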
@@ -342,21 +346,38 @@ def plot_lineage_fate_correlation( scatter_dotsize_factor = 3 # SHIFT AXIS INDEX ax, current_axis_index = get_next_axis(all_axes, current_axis_index) - scv.pl.scatter( - adata=adata_cospar_obs_subset, - basis="emb", - fontsize=default_fontsize, - color="fate_potency_transition_map", - cmap="inferno_r", - show=False, - ax=ax, - s=dotsize * scatter_dotsize_factor, - colorbar=show_colorbars, - title="", - ) + + if plot_individual_obs: + scv.pl.scatter( + adata=adata_cospar_obs_subset, + basis="emb", + fontsize=default_fontsize, + color="fate_potency_transition_map", + cmap="inferno_r", + show=False, + ax=ax, + s=dotsize * scatter_dotsize_factor, + colorbar=show_colorbars, + title="", + ) + else: + im = ax.hexbin( + x=adata_cospar_obs_subset.obsm[f"X_emb"][:, 0], + y=adata_cospar_obs_subset.obsm[f"X_emb"][:, 1], + C=adata_cospar_obs_subset.obs["fate_potency_transition_map"], + gridsize=100, + cmap="inferno_r", + linewidths=0, + edgecolors="none", + reduce_C_function=np.mean, + ) + if show_colorbars: + set_colorbar( + im, ax, labelsize=default_fontsize, fig=fig, position="right" + ) + ax.axis("off") if show_titles: - # ax.set_title("Clonal fate potency", fontsize=default_fontsize) ax.set_title( "Fate potency", fontsize=default_fontsize, @@ -371,18 +392,36 @@ def plot_lineage_fate_correlation( # SHIFT AXIS INDEX ax, current_axis_index = get_next_axis(all_axes, current_axis_index) - scv.pl.scatter( - adata=adata_scvelo, - c="latent_time", - basis="emb", - s=dotsize * scatter_dotsize_factor, - cmap="inferno", - ax=ax, - show=False, - fontsize=default_fontsize, - colorbar=show_colorbars, - title="", - ) + + if plot_individual_obs: + scv.pl.scatter( + adata=adata_scvelo, + c="latent_time", + basis="emb", + s=dotsize * scatter_dotsize_factor, + cmap="inferno", + ax=ax, + show=False, + fontsize=default_fontsize, + colorbar=show_colorbars, + title="", + ) + else: + im = ax.hexbin( + x=adata_scvelo.obsm[f"X_emb"][:, 0], + y=adata_scvelo.obsm[f"X_emb"][:, 1], + C=adata_scvelo.obs["latent_time"], + gridsize=100, + cmap="inferno", + linewidths=0, + edgecolors="none", + reduce_C_function=np.mean, + ) + if show_colorbars: + set_colorbar( + im, ax, labelsize=default_fontsize, fig=fig, position="right" + ) + ax.axis("off") if show_titles: ax.set_title( From e02e02feae1d8a799e374612a4fc8d52a4b3cbf9 Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Sun, 30 Mar 2025 16:17:53 -0400 Subject: [PATCH 30/32] fix(plots): downsample cluster data for larger than 5000 obs Signed-off-by: Cameron Smith --- .../plots/_lineage_fate_correlation.py | 54 ++++++++++++++++--- 1 file changed, 46 insertions(+), 8 deletions(-) diff --git a/src/pyrovelocity/plots/_lineage_fate_correlation.py b/src/pyrovelocity/plots/_lineage_fate_correlation.py index cb105c617..f3daf72fa 100644 --- a/src/pyrovelocity/plots/_lineage_fate_correlation.py +++ b/src/pyrovelocity/plots/_lineage_fate_correlation.py @@ -159,17 +159,54 @@ def plot_lineage_fate_correlation( # SHIFT AXIS INDEX ax, current_axis_index = get_next_axis(all_axes, current_axis_index) - res = pd.DataFrame( - { - "X": adata_pyrovelocity.obsm["X_emb"][:, 0], - "Y": adata_pyrovelocity.obsm["X_emb"][:, 1], - "cell_type": adata_pyrovelocity.obs.state_info, - } - ) + if plot_individual_obs or adata_pyrovelocity.n_obs < 5000: + plot_data = pd.DataFrame( + { + "X": adata_pyrovelocity.obsm["X_emb"][:, 0], + "Y": adata_pyrovelocity.obsm["X_emb"][:, 1], + "cell_type": adata_pyrovelocity.obs.state_info, + } + ) + else: + cell_counts = 
adata_pyrovelocity.obs.state_info.value_counts() + smallest_cluster_size = cell_counts.min() + min_representation = max(50, int(smallest_cluster_size * 0.9)) + max_total_cells = 5000 + obs_indices = [] + + for ct in adata_pyrovelocity.obs.state_info.cat.categories: + mask = adata_pyrovelocity.obs.state_info == ct + n_cells = np.sum(mask) + if n_cells <= min_representation: + sample_size = n_cells + else: + proportion = n_cells / adata_pyrovelocity.n_obs + sample_size = max( + min_representation, + min(n_cells, int(max_total_cells * proportion)), + ) + if sample_size >= n_cells: + ct_indices = np.where(mask)[0] + else: + ct_indices = np.random.choice( + np.where(mask)[0], size=sample_size, replace=False + ) + obs_indices.extend(ct_indices) + + plot_data = pd.DataFrame( + { + "X": adata_pyrovelocity.obsm["X_emb"][obs_indices, 0], + "Y": adata_pyrovelocity.obsm["X_emb"][obs_indices, 1], + "cell_type": adata_pyrovelocity.obs.state_info.values[ + obs_indices + ], + } + ) + sns.scatterplot( x="X", y="Y", - data=res, + data=plot_data, alpha=0.90, s=dotsize, linewidth=0, @@ -179,6 +216,7 @@ def plot_lineage_fate_correlation( ax=ax, legend="brief", ) + ax.get_legend().remove() ax.axis("off") if show_titles: From 1a498381e66889689143b9079583dd64b727704a Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Sun, 30 Mar 2025 16:19:24 -0400 Subject: [PATCH 31/32] chore(workflows): lineage fate correlation cache `2024.8.15.8` Signed-off-by: Cameron Smith --- src/pyrovelocity/workflows/main_workflow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pyrovelocity/workflows/main_workflow.py b/src/pyrovelocity/workflows/main_workflow.py index 6bbc354fa..b8fd9a229 100644 --- a/src/pyrovelocity/workflows/main_workflow.py +++ b/src/pyrovelocity/workflows/main_workflow.py @@ -86,7 +86,7 @@ POSTPROCESS_CACHE_VERSION = f"{CACHE_VERSION}.2" SUMMARIZE_CACHE_VERSION = f"{CACHE_VERSION}.3" UPLOAD_CACHE_VERSION = f"{CACHE_VERSION}.7" -LINEAGE_FATE_CORRELATION_CACHE_VERSION = f"{CACHE_VERSION}.7" +LINEAGE_FATE_CORRELATION_CACHE_VERSION = f"{CACHE_VERSION}.8" COMBINE_METRICS_CACHE_VERSION = f"{CACHE_VERSION}.5" DEFAULT_ACCELERATOR_TYPE: GPUAccelerator = T4 From bab340124f583e33ec2986b3d9fdf7174dd5bd6d Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Mon, 31 Mar 2025 00:10:35 -0400 Subject: [PATCH 32/32] chore(version): `0.4.1` Signed-off-by: Cameron Smith --- MODULE.bazel | 2 +- Makefile | 4 ++-- conda/colab/construct.yaml | 8 ++++---- containers/gpu.Dockerfile | 2 +- containers/pkg.Dockerfile | 2 +- docs/source/notebooks/pyrovelocity_colab_template.ipynb | 2 +- pyproject.toml | 2 +- scripts/conda | 8 ++++---- 8 files changed, 15 insertions(+), 15 deletions(-) diff --git a/MODULE.bazel b/MODULE.bazel index 10bd4a11e..c64f830f5 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -4,7 +4,7 @@ pyrovelocity MODULE module( name = "pyrovelocity", - version = "0.4.0", + version = "0.4.1", compatibility_level = 1, ) diff --git a/Makefile b/Makefile index b71e7ead3..6466aa440 100644 --- a/Makefile +++ b/Makefile @@ -934,8 +934,8 @@ approve-prs: ## Approve github pull requests from bots: PR_ENTRIES="2-5 10 12-18 fi; \ done -PREVIOUS_VERSION := 0.4.0b5 -NEXT_VERSION := 0.4.0 +PREVIOUS_VERSION := 0.4.0 +NEXT_VERSION := 0.4.1 VERSION_FILES := \ pyproject.toml \ conda/colab/construct.yaml \ diff --git a/conda/colab/construct.yaml b/conda/colab/construct.yaml index 59f853f0c..4c855a02d 100644 --- a/conda/colab/construct.yaml +++ b/conda/colab/construct.yaml @@ -1,5 +1,5 @@ name: pyrovelocity-colab -version: 0.4.0 
+version: 0.4.1 channels: - pytorch @@ -89,7 +89,7 @@ specs: - brotli-python=1.1.0=py311hfdbb021_2 - bzip2=1.0.8=h4bc722e_7 - c-ares=1.34.4=hb9d3cd8_0 - - ca-certificates=2025.1.31=hbcca054_0 + - ca-certificates=2025.1.31=hbcca0.4.1 - cached-property=1.5.2=hd8ed1ab_1 - cached_property=1.5.2=pyha770c72_1 - cachetools=5.5.2=pyhd8ed1ab_0 @@ -218,7 +218,7 @@ specs: - h2=4.2.0=pyhd8ed1ab_0 - h5netcdf=1.6.1=pyhd8ed1ab_0 - h5py=3.13.0=nompi_py311hb639ac4_100 - - harfbuzz=10.4.0=h76408a6_0 + - harfbuzz=10.4.1=h76408a6_0 - hdf5=1.14.3=nompi_h2d575fe_109 - hicolor-icon-theme=0.17=ha770c72_2 - hpack=4.1.0=pyhd8ed1ab_0 @@ -294,7 +294,7 @@ specs: - libcufile=1.13.1.3=0 - libcups=2.3.3=h4637d8d_4 - libcurand=10.3.9.90=0 - - libcurl=8.12.1=h332b0f4_0 + - libcurl=8.12.1=h332b0.4.1 - libcusolver=11.6.1.9=0 - libcusparse=12.3.1.170=0 - libdeflate=1.23=h4ddbbb0_0 diff --git a/containers/gpu.Dockerfile b/containers/gpu.Dockerfile index 04977b43c..cb9eb7416 100644 --- a/containers/gpu.Dockerfile +++ b/containers/gpu.Dockerfile @@ -77,7 +77,7 @@ COPY . /root # development RUN pip install --no-deps -e . # distribution -# RUN pip install pyrovelocity==0.4.0 +# RUN pip install pyrovelocity==0.4.1 ARG tag ENV FLYTE_INTERNAL_IMAGE $tag diff --git a/containers/pkg.Dockerfile b/containers/pkg.Dockerfile index 523bb58a6..eb8c29cc7 100644 --- a/containers/pkg.Dockerfile +++ b/containers/pkg.Dockerfile @@ -34,7 +34,7 @@ COPY . /root # development RUN pip install --no-deps -e . # distribution -# RUN pip install pyrovelocity==0.4.0 +# RUN pip install pyrovelocity==0.4.1 ARG tag ENV FLYTE_INTERNAL_IMAGE $tag diff --git a/docs/source/notebooks/pyrovelocity_colab_template.ipynb b/docs/source/notebooks/pyrovelocity_colab_template.ipynb index 71c8d12a4..5cb694f1f 100644 --- a/docs/source/notebooks/pyrovelocity_colab_template.ipynb +++ b/docs/source/notebooks/pyrovelocity_colab_template.ipynb @@ -70,7 +70,7 @@ } ], "source": [ - "pyrovelocity_version = \"0.4.0\"\n", + "pyrovelocity_version = \"0.4.1\"\n", "pyrovelocity_colab_script_url = (\n", " \"https://storage.googleapis.com/pyrovelocity/data/scripts/\"\n", " + f\"pyrovelocity-colab-{pyrovelocity_version}-Linux-x86_64.sh\"\n", diff --git a/pyproject.toml b/pyproject.toml index 87d0eae8c..e5bf2c944 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "pyrovelocity" -version = "0.4.0" +version = "0.4.1" packages = [{ include = "pyrovelocity", from = "src" }] description = "A multivariate RNA Velocity model to estimate future cell states with uncertainty using probabilistic modeling with pyro." authors = ["pyrovelocity team"] diff --git a/scripts/conda b/scripts/conda index 658864f59..e559f82c8 100755 --- a/scripts/conda +++ b/scripts/conda @@ -3,7 +3,7 @@ set -euo pipefail PACKAGE_NAME="pyrovelocity" -PACKAGE_VERSION="0.4.0" +PACKAGE_VERSION="0.4.1" CONDA_BUILD_STRING="pyhff70e4c" CONDA_BUILD_NUMBER="0" # CONDA_CHANNEL_LABEL="pyrovelocity_dev" @@ -32,7 +32,7 @@ Example: ./conda \\ --name pyrovelocity \\ - --version 0.4.0.dev1 \\ + --version 0.4.1.dev1 \\ --build-string pyhff70e4c \\ --build-number 0 \\ --label pyrovelocity_dev @@ -67,9 +67,9 @@ PACKAGE_SPEC="conda-forge/label/\ $CONDA_CHANNEL_LABEL::\ $PACKAGE_NAME=$PACKAGE_VERSION=$CONDA_BUILD_STRING"_"$CONDA_BUILD_NUMBER" -BLUE="\0.4.0;34;1m" +BLUE="\0.4.1;34;1m" BOLD="\033[1m" -NO_COLOR="\0.4.0m" +NO_COLOR="\0.4.1m" if [ "$USE_COLOR" = false ]; then BLUE="" BOLD=""
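For orientation across the series, a minimal sketch of how the pieces introduced above compose: the vendored LARRY lineage subsets are loaded, per-lineage clone trajectories are computed and concatenated as in time_fate_correlation.py, and the clonal displacement vectors are aligned against a velocity embedding on a shared grid following the align_trajectory_diff smoke test. The presence of a velocity_emb field and the density value are assumptions carried over from that test, which substitutes zeros when the field is missing.

import numpy as np

from pyrovelocity.analysis.trajectory import (
    align_trajectory_diff,
    get_clone_trajectory,
)
from pyrovelocity.io.datasets import larry_mono, larry_neu

# Load the vendored lineage subsets and compute per-lineage clone trajectories.
mono_adata = larry_mono()
neu_adata = larry_neu()
mono_clone = get_clone_trajectory(mono_adata)
neu_clone = get_clone_trajectory(neu_adata)

# Concatenate into multilineage objects, as in time_fate_correlation.py.
multi_clone = mono_clone.concatenate(neu_clone)
multi_adata = mono_adata.concatenate(neu_adata)

# Align clonal displacement vectors with a velocity field on a shared grid.
# "velocity_emb" is an assumed key; the smoke test falls back to zeros.
diff = align_trajectory_diff(
    [multi_clone, multi_adata],
    [
        multi_clone.obsm.get(
            "clone_vector_emb", np.zeros((multi_clone.n_obs, 2))
        ),
        multi_adata.obsm.get(
            "velocity_emb", np.zeros((multi_adata.n_obs, 2))
        ),
    ],
    embed="emb",
    density=0.35,
)

# Columns: grid point (2) followed by one 2-D vector per input field.
print(diff.shape)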