scanpy处理:使用自定义 python 函数读取百迈客空间转录组数据(百创智造S1000)

原有的版本用起来不太方便,因此定义了一个函数,方便直接在解释器中调用。函数返回 adata 对象,示例代码如下:

def read_bmk(input, input_png, library_id):
    """Read BMK (百创智造 S1000) spatial transcriptomics output into an AnnData.

    Parameters
    ----------
    input : str
        Directory with 10x-style matrix files plus ``barcodes_pos.tsv.gz``
        (columns: barcode, array_col, array_row; no header).
        NOTE: the parameter name shadows the ``input`` builtin; kept for
        backward compatibility with existing keyword-argument callers.
    input_png : str
        Path to the H&E image (PNG) registered to the spot coordinates.
    library_id : str
        Key used under ``adata.uns['spatial']`` for this sample.

    Returns
    -------
    anndata.AnnData
        With ``obs`` (in_tissue, array_row, array_col), ``obsm['spatial']``
        and ``uns['spatial'][library_id]`` populated so ``sc.pl.spatial``
        can render it. A spatial plot is shown as a side effect.

    Raises
    ------
    FileNotFoundError
        If the image at ``input_png`` cannot be read.
    """
    import os

    import cv2
    import numpy as np
    import pandas as pd

    adata = sc.read_10x_mtx(input, var_names='gene_symbols', cache=False)

    # Spot position table; every listed spot is treated as "in tissue".
    barcode_pos = pd.read_csv(
        os.path.join(input, "barcodes_pos.tsv.gz"),
        compression='gzip', sep='\t',
        names=["barcode", "array_col", "array_row"], header=None,
    )
    barcode_pos['in_tissue'] = 1
    barcode_pos = barcode_pos[['barcode', 'in_tissue', 'array_row', 'array_col']]
    barcode_pos.index = barcode_pos['barcode']
    # Align the position table to the matrix barcode order before attaching.
    barcode_pos = barcode_pos.loc[adata.obs_names, :]
    adata.obs = barcode_pos
    adata.obs.index = adata.obs['barcode'].to_list()
    adata.obs = adata.obs[['in_tissue', 'array_row', 'array_col']]

    # Plotting coordinates: (x, y) = (array_col, array_row), one row per spot.
    adata.obsm["spatial"] = barcode_pos[['array_col', 'array_row']].to_numpy()

    he_img = cv2.imread(input_png)
    if he_img is None:
        # cv2.imread silently returns None on a missing/unreadable file;
        # fail early with a clear error instead of a later AttributeError.
        raise FileNotFoundError(f"could not read image: {input_png}")
    # OpenCV loads images as BGR, but scanpy/matplotlib expect RGB —
    # convert, then scale intensities to [0, 1].
    he_img = cv2.cvtColor(he_img, cv2.COLOR_BGR2RGB) / 255
    adata.uns["spatial"] = {library_id: {}}
    adata.uns["spatial"][library_id]["images"] = {"hires": he_img.astype(np.float32)}
    adata.uns['spatial'][library_id]['use_quality'] = 'hires'

    def cal_zoom_rate(width, height):
        # Scale factor mapping the chip's nominal grid (1000 px wide; the
        # height follows from the hexagonal 46x31 / 46x36 layout — TODO
        # confirm these constants against the S1000 chip spec) onto the
        # actual image size, preserving aspect ratio.
        std_width = 1000
        std_height = std_width / (46 * 31) * (46 * 36 * np.sqrt(3) / 2.0)
        if std_width / std_height > width / height:
            scale = width / std_width
        else:
            scale = height / std_height
        return scale

    zoom_scale = cal_zoom_rate(he_img.shape[1], he_img.shape[0])
    # scanpy expects all four scalefactor keys; the same scalar is used for
    # each here, matching the original behavior.
    adata.uns['spatial'][library_id]['scalefactors'] = {
        "spot_diameter_fullres": zoom_scale,
        "tissue_hires_scalef": zoom_scale,
        "fiducial_diameter_fullres": zoom_scale,
        "tissue_lowres_scalef": zoom_scale,
    }
    sc.pl.spatial(adata, img_key="hires")
    return adata

使用示例:以 Level 7 的标准输出目录(L7_heAuto)为输入,创建 adata 对象看看:

adata = read_bmk(input = "./self_data_analysis/cancer1/L7_heAuto",
                  input_png="./self_data_analysis/cancer1/he_roi_small.png",
                  library_id="cancer1")

你可能感兴趣的:(python,人工智能,算法,深度学习,数据分析,大数据,经验分享)