refactor to copy channel in separate rule
better for parallelization
akhanf committed Jan 14, 2025
1 parent 1574a62 commit 79e61a2
Showing 3 changed files with 59 additions and 14 deletions.
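The motivation for the split: previously, imaris_to_ome_zarr.py copied every channel out of the Imaris HDF5 file itself, one after another, inside a single job (see the removed loop in the last file below); with the new imaris_channel_to_zarr rule, each stain's channel is copied into its own intermediate imaris.zarr, so the copies become independent jobs that the scheduler can run concurrently. A minimal sketch of the per-stain fan-out, using a hypothetical stain list (in the workflow it comes from get_stains(wildcards)):

# hypothetical stain list; the workflow derives it from get_stains(wildcards)
stains = ["Lectin", "PI", "Abeta"]

# one imaris_channel_to_zarr job per stain; the channel index is the stain's
# position in the list, mirroring the rule's params lambda
jobs = [{"stain": s, "channel": stains.index(s)} for s in stains]
# -> [{'stain': 'Lectin', 'channel': 0}, {'stain': 'PI', 'channel': 1}, {'stain': 'Abeta', 'channel': 2}]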
46 changes: 46 additions & 0 deletions workflow/rules/imaris.smk
@@ -35,10 +35,56 @@ rule imaris_to_metadata:
script:
"../scripts/imaris_to_metadata.py"

rule imaris_channel_to_zarr:
input:
ims=get_input_sample,
params:
channel=lambda wildcards: get_stains(wildcards).index(wildcards.stain),
output:
zarr=directory(bids(
root=root,
subject="{subject}",
datatype="micr",
sample="{sample}",
acq="{acq}",
stain="{stain}",
suffix="imaris.zarr",
)),
log:
bids(
root="logs",
subject="{subject}",
datatype="imaris_channel_to_zarr",
sample="{sample}",
acq="{acq}",
stain="{stain}",
suffix="log.txt",
),
container:
config["containers"]["spimprep"]
group:
"preproc"
threads: 1
resources:
runtime=360,
mem_mb=1000,
shadow: 'minimal'
script:
"../scripts/imaris_channel_to_zarr.py"


rule imaris_to_ome_zarr:
input:
ims=get_input_sample,
zarr=lambda wildcards: expand(bids(
root=root,
subject="{subject}",
datatype="micr",
sample="{sample}",
acq="{acq}",
stain="{stain}",
suffix="imaris.zarr",
),stain=get_stains(wildcards),allow_missing=True),
metadata_json=rules.prestitched_to_metadata.output.metadata_json,
params:
max_downsampling_layers=config["ome_zarr"]["max_downsampling_layers"],
10 changes: 10 additions & 0 deletions workflow/scripts/imaris_channel_to_zarr.py
@@ -0,0 +1,10 @@
import h5py
import hdf5plugin  # registers extra HDF5 compression filters the Imaris file may need
import zarr
from sys import stdout  # TODO: redirect the copy log to the rule's log file

# open the Imaris (HDF5) file read-only and create the output zarr group
source = h5py.File(snakemake.input.ims, mode='r')
dest = zarr.open_group(snakemake.output.zarr, mode='w')

# copy only this stain's full-resolution channel; compressor=None stores the
# copied chunks uncompressed
zarr.copy(
    source['DataSet/ResolutionLevel 0/TimePoint 0/Channel {chan}/Data'.format(chan=snakemake.params.channel)],
    dest,
    log=stdout,
    compressor=None,
)
source.close()
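How the two scripts fit together: zarr.copy writes the copied array into the destination group under the source dataset's name, Data, which is why imaris_to_ome_zarr.py (below) opens each per-stain zarr with component='Data'. A minimal sketch of reading one of these intermediates back, assuming dask is available and using a hypothetical output path:

import dask.array as da

# hypothetical path to one per-stain output of imaris_channel_to_zarr
in_zarr = "sub-01_sample-brain_acq-spim_stain-PI_imaris.zarr"
darr = da.from_zarr(in_zarr, component="Data")  # the copied channel lives at Data inside the group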

17 changes: 3 additions & 14 deletions workflow/scripts/imaris_to_ome_zarr.py
@@ -14,17 +14,6 @@


stains=snakemake.params.stains



source = h5py.File(snakemake.input.ims, mode='r')
dest = zarr.open_group('copy_hdf5.zarr', mode='w')
from sys import stdout
for chan in range(len(stains)):
zarr.copy(source[f'DataSet/ResolutionLevel 0/TimePoint 0/Channel {chan}/Data'], dest, name=f'channel_{chan}',log=stdout)
source.close()

in_zarr='copy_hdf5.zarr'
metadata_json=snakemake.input.metadata_json
downsampling=snakemake.params.downsampling
max_layer=snakemake.params.max_downsampling_layers #number of downsamplings by 2 to include in zarr
@@ -73,15 +62,15 @@


darr_list=[]
for zarr_i,stain in enumerate(stains):
for zarr_i,in_zarr in enumerate(snakemake.input.zarr):
#open zarr to get group name
zi = zarr.open(in_zarr)
darr_list.append(da.from_zarr(in_zarr,component=f'channel_{zarr_i}').rechunk(rechunk_size))
darr_list.append(da.from_zarr(in_zarr,component='Data').rechunk(rechunk_size))


#append to omero metadata
channel_metadata={key:val for key,val in snakemake.config['ome_zarr']['omero_metadata']['channels']['defaults'].items()}
channel_name=stain
channel_name=stains[zarr_i]
channel_metadata['label'] = channel_name
default_color=snakemake.config['ome_zarr']['omero_metadata']['channels']['default_color']
color=snakemake.config['ome_zarr']['omero_metadata']['channels']['color_mapping'].get(channel_name,default_color)
