Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
__pycache__/
*.py[cod]
*.egg-info/
dist/
build/
.eggs/
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
# NicheSphere

[![DOI](https://zenodo.org/badge/685916590.svg)](https://doi.org/10.5281/zenodo.18756710)
![PyPI - Version](https://img.shields.io/pypi/v/NicheSphere)


NicheSphere is an sc-verse compatible Python library which allows the user to find differential co-localization domains / niches based on cell type pair co-localization probabilities in different conditions. Cell type pair co-localization probabilities can be obtained in different ways: for example, through deconvolution of spatial transcriptomics / PIC-seq data (getting the probabilities of finding each cell type in each spot / multiplet); or by counting nearest neighbors of each type for each cell in single-cell spatial data such as MERFISH or CODEX.

Expand Down
544 changes: 426 additions & 118 deletions poetry.lock

Large diffs are not rendered by default.

8 changes: 5 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,13 @@ readme = "README.md"
where = ["src"]

[tool.poetry.dependencies]
python = "^3.10"
scanpy = "1.10.4"
python = "^3.11"
scanpy = ">=1.10.4,<1.12"
leidenalg = "^0.10.2"
pot = "^0.9.5"
anndata = "0.11.4"
anndata = ">=0.12.11,<0.13"
zarr = ">=3.1.6,<4"
pillow = ">=12.2.0"

[build-system]
requires = ["poetry-core"]
Expand Down
815 changes: 494 additions & 321 deletions requirements.txt

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion src/nichesphere/coloc.py
Original file line number Diff line number Diff line change
Expand Up @@ -387,7 +387,7 @@ def colocNW(x_diff,adj, cell_group, group=None, group_cmap='tab20', ncols=20, cl
graycmp = ListedColormap(graycmp)

#cell groups cmap
cmap = plt.cm.get_cmap(group_cmap, ncols)
cmap = plt.colormaps[group_cmap].resampled(ncols)
if clist == None:
cgroup_cmap=[mcolors.rgb2hex(cmap(i)[:3]) for i in range(cmap.N)]
else:
Expand Down
6 changes: 3 additions & 3 deletions src/nichesphere/comm.py
Original file line number Diff line number Diff line change
Expand Up @@ -238,12 +238,12 @@ def getDiffComm(diffCommTbl, pairCatDF, ncells, cat):
cells x cells or groups x groups dataframe of differential communication scores for a specific LR category
"""
x=pd.DataFrame(pairCatDF.cell_pairs)
x['wilcoxStat']=0
x['wilcoxStat']=0.0
x.index=pairCatDF.cell_pairs


for i in diffCommTbl.columns:
x.wilcoxStat[i]=diffCommTbl[i][cat]
x.loc[i, 'wilcoxStat']=diffCommTbl[i][cat]


x=pd.Series(x.wilcoxStat)
Expand Down Expand Up @@ -342,7 +342,7 @@ def catNW(x_chem,colocNW, cell_group, group=None, group_cmap='tab20', ncols=20,
graycmp = ListedColormap(graycmp)

#cell group cmap
cmap = plt.cm.get_cmap(group_cmap, ncols)
cmap = plt.colormaps[group_cmap].resampled(ncols)
if clist == None:
cgroup_cmap=[mcolors.rgb2hex(cmap(i)[:3]) for i in range(cmap.N)]
else:
Expand Down
16 changes: 8 additions & 8 deletions src/nichesphere/tl.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,8 +73,8 @@ def cells_niche_colors(CTs, niche_colors, niche_dict):
niche_df['niche']=niche_colors.index[0]
niche_df['color']=niche_colors[0]
for key in list(niche_dict.keys()):
niche_df['niche'][[c in niche_dict[key] for c in niche_df.cell]]=key
niche_df['color'][niche_df['niche']==key]=niche_colors[key]
niche_df.loc[[c in niche_dict[key] for c in niche_df.cell], 'niche']=key
niche_df.loc[niche_df['niche']==key, 'color']=niche_colors[key]
niche_df.index=niche_df.cell
niche_df.niche=niche_df.niche.astype('category')
return niche_df
Expand Down Expand Up @@ -151,7 +151,7 @@ def PIC_BGdoubletsOEratios(adata_singlets, annot_col):
## Get random singlets pairs
pairNums=[i for i in range(int(np.round(adata_singlets.obs.shape[0]//2))) for _ in range(2)]
pairNumsIdx=random.sample(list(adata_singlets.obs.index), len(pairNums))
rdf.pair[pairNumsIdx]=pairNums
rdf.loc[pairNumsIdx, 'pair']=pairNums

pairCounts=[rdf.annot[rdf.pair==i][0]+'-'+rdf.annot[rdf.pair==i][1] for i in rdf.pair.value_counts().index[rdf.pair.value_counts()==2]]

Expand Down Expand Up @@ -234,10 +234,10 @@ def get_pairCatDFdir(niches_df):

pairCatDFdir['niche_pairs']=''
for clust in np.sort(niches_df.niche.unique()):
pairCatDFdir['niche_pairs'][[cellCatContained(pair=p, cellCat=niches_df.cell[niches_df.niche==clust]) for p in pairCatDFdir.cell_pairs]]=clust+'->'+clust
pairCatDFdir.loc[[cellCatContained(pair=p, cellCat=niches_df.cell[niches_df.niche==clust]) for p in pairCatDFdir.cell_pairs], 'niche_pairs']=clust+'->'+clust

for comb in list(itertools.permutations(list(niches_df.niche.unique().sort_values()), 2)):
pairCatDFdir['niche_pairs'][[(p.split('->')[0] in niches_df.cell[niches_df.niche==comb[0]]) & (p.split('->')[1] in niches_df.cell[niches_df.niche==comb[1]]) for p in pairCatDFdir.cell_pairs]]=comb[0]+'->'+comb[1]
pairCatDFdir.loc[[(p.split('->')[0] in niches_df.cell[niches_df.niche==comb[0]]) & (p.split('->')[1] in niches_df.cell[niches_df.niche==comb[1]]) for p in pairCatDFdir.cell_pairs], 'niche_pairs']=comb[0]+'->'+comb[1]

return pairCatDFdir
#%%
Expand Down Expand Up @@ -288,10 +288,10 @@ def getColocFilter(pairCatDF, adj, oneCTints):
colocFilt['filter']=0

for i in pairCatDF.cell_pairs:
colocFilt['filter'][i]=adj.loc[i.split('->')[1],i.split('->')[0]]
colocFilt.loc[i, 'filter']=adj.loc[i.split('->')[1],i.split('->')[0]]

colocFilt['filter'][oneCTints]=1
colocFilt['filter'][colocFilt['filter']>0]=1
colocFilt.loc[oneCTints, 'filter']=1
colocFilt.loc[colocFilt['filter']>0, 'filter']=1
colocFilt=pd.DataFrame(colocFilt['filter'], index=colocFilt.index, columns=['filter'])
return colocFilt

Expand Down
Binary file not shown.