Commit
Showing 9 changed files with 425 additions and 187 deletions.
@@ -0,0 +1,20 @@
FROM polusai/bfio:2.3.3

# environment variables defined in polusai/bfio
ENV EXEC_DIR="/opt/executables"
ENV POLUS_IMG_EXT=".ome.tif"
ENV POLUS_TAB_EXT=".csv"
ENV POLUS_LOG="INFO"

# Work directory defined in the base container
WORKDIR ${EXEC_DIR}

COPY pyproject.toml ${EXEC_DIR}
COPY VERSION ${EXEC_DIR}
COPY README.md ${EXEC_DIR}
COPY src ${EXEC_DIR}/src

RUN pip3 install ${EXEC_DIR} --no-cache-dir

ENTRYPOINT ["python3", "-m", "polus.plugins.transforms.images.apply_flatfield"]
CMD ["--help"]
@@ -0,0 +1,29 @@
#!/bin/bash

version=$(<VERSION)
datapath=$(readlink --canonicalize ./data)
echo ${datapath}

# Inputs
imgDir="/data/images"
imgPattern="p{p:d+}_x{x:d+}_y{y:d+}_wx{r:d+}_wy{z:d+}_c{c:d+}.ome.tif"
ffDir="/data/estimation"
brightPattern="p{p:d+}_x\\(01-24\\)_y\\(01-16\\)_wx\\(1-3\\)_wy\\(1-3\\)_c{c:d+}_flatfield.ome.tif"
darkPattern="p{p:d+}_x\\(01-24\\)_y\\(01-16\\)_wx\\(1-3\\)_wy\\(1-3\\)_c{c:d+}_darkfield.ome.tif"
# photoPattern=""

# Output paths
outDir=/data/outputs

FILE_EXT=".ome.zarr"

docker run --mount type=bind,source=${datapath},target=/data/ \
            -e POLUS_EXT=${FILE_EXT} \
            --user $(id -u):$(id -g) \
            polusai/apply-flatfield-plugin:${version} \
            --imgDir ${imgDir} \
            --imgPattern ${imgPattern} \
            --ffDir ${ffDir} \
            --brightPattern ${brightPattern} \
            --darkPattern ${darkPattern} \
            --outDir ${outDir}
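
The filepattern strings in this script are what pair raw images with their flatfield and darkfield components: the image pattern extracts per-file variables from each filename, and the plugin groups images by the variables shared with the flatfield pattern. The following is a minimal Python sketch of that grouping, using the same FilePattern calls as the module below; the directory path is copied from the script, and the grouping variables (p and c) are an illustrative assumption.

from filepattern import FilePattern

# Illustrative path and pattern copied from the script above; adjust to your layout.
img_fp = FilePattern(
    "/data/images",
    "p{p:d+}_x{x:d+}_y{y:d+}_wx{r:d+}_wy{z:d+}_c{c:d+}.ome.tif",
)
print(img_fp.get_variables())  # variables parsed from the filenames

# Group images by the variables assumed to be shared with the flatfield pattern
# (p and c here), so that each group maps onto a single flatfield image.
for group, files in img_fp(group_by=["p", "c"]):
    img_paths = [path for _, [path] in files]
    print(dict(group), len(img_paths))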
...es/apply-flatfield-plugin/src/polus/plugins/transforms/images/apply_flatfield/__init__.py: 158 changes (1 addition & 157 deletions)
@@ -1,162 +1,6 @@
"""Provides the apply_flatfield module."""

import concurrent.futures
import logging
import operator
import pathlib
import sys
import typing

import bfio
import numpy
import tqdm
from filepattern import FilePattern

from . import utils
from .apply_flatfield import apply

__version__ = "2.0.0-dev8"

logger = logging.getLogger(__name__)
logger.setLevel(utils.POLUS_LOG)


def apply(  # noqa: PLR0913
    img_dir: pathlib.Path,
    img_pattern: str,
    ff_dir: pathlib.Path,
    ff_pattern: str,
    df_pattern: typing.Optional[str],
    out_dir: pathlib.Path,
) -> None:
    """Run batch-wise flatfield correction on the image collection."""
    img_fp = FilePattern(str(img_dir), img_pattern)
    img_variables = img_fp.get_variables()

    ff_fp = FilePattern(str(ff_dir), ff_pattern)
    ff_variables = ff_fp.get_variables()

    # check that ff_variables are a subset of img_variables
    if set(ff_variables) - set(img_variables):
        msg = (
            f"Flatfield variables are not a subset of image variables: "
            f"{ff_variables} - {img_variables}"
        )
        raise ValueError(msg)

    if (df_pattern is None) or (not df_pattern):
        df_fp = None
    else:
        df_fp = FilePattern(str(ff_dir), df_pattern)
        df_variables = df_fp.get_variables()
        if set(df_variables) != set(ff_variables):
            msg = (
                f"Flatfield and darkfield variables do not match: "
                f"{ff_variables} != {df_variables}"
            )
            raise ValueError(msg)

    for group, files in img_fp(group_by=ff_variables):
        img_paths = [p for _, [p] in files]
        variables = dict(group)

        ff_path: pathlib.Path = ff_fp.get_matching(**variables)[0][1][0]

        df_path = None if df_fp is None else df_fp.get_matching(**variables)[0][1][0]

        _unshade_images(img_paths, out_dir, ff_path, df_path)


def _unshade_images(
    img_paths: list[pathlib.Path],
    out_dir: pathlib.Path,
    ff_path: pathlib.Path,
    df_path: typing.Optional[pathlib.Path],
) -> None:
    """Remove the given flatfield components from all images and save outputs.

    Args:
        img_paths: list of paths to images to be processed
        out_dir: directory to save the corrected images
        ff_path: path to the flatfield image
        df_path: path to the darkfield image
    """
    with bfio.BioReader(ff_path, max_workers=2) as bf:
        ff_image = bf[:, :, :, 0, 0].squeeze()

    if df_path is not None:
        with bfio.BioReader(df_path, max_workers=2) as df:
            df_image = df[:, :, :, 0, 0].squeeze()
    else:
        df_image = None

    batch_indices = list(range(0, len(img_paths), 16))
    if batch_indices[-1] != len(img_paths):
        batch_indices.append(len(img_paths))

    for i_start, i_end in tqdm.tqdm(
        zip(batch_indices[:-1], batch_indices[1:]),
        total=len(batch_indices) - 1,
    ):
        _unshade_batch(
            img_paths[i_start:i_end],
            out_dir,
            ff_image,
            df_image,
        )


def _unshade_batch(
    batch_paths: list[pathlib.Path],
    out_dir: pathlib.Path,
    ff_image: numpy.ndarray,
    df_image: typing.Optional[numpy.ndarray] = None,
) -> None:
    """Apply flatfield correction to a batch of images.

    Args:
        batch_paths: list of paths to images to be processed
        out_dir: directory to save the corrected images
        ff_image: component to be used for flatfield correction
        df_image: component to be used for darkfield correction
    """
    # Load images
    images = []
    with concurrent.futures.ProcessPoolExecutor(
        max_workers=utils.MAX_WORKERS,
    ) as load_executor:
        load_futures = []
        for i, inp_path in enumerate(batch_paths):
            load_futures.append(load_executor.submit(utils.load_img, inp_path, i))

        for lf in tqdm.tqdm(
            concurrent.futures.as_completed(load_futures),
            total=len(load_futures),
            desc="Loading batch",
        ):
            images.append(lf.result())

    images = [img for _, img in sorted(images, key=operator.itemgetter(0))]
    img_stack = numpy.stack(images, axis=0)

    # Apply flatfield correction
    if df_image is not None:
        img_stack -= df_image

    img_stack /= ff_image

    # Save outputs
    with concurrent.futures.ProcessPoolExecutor(
        max_workers=utils.MAX_WORKERS,
    ) as save_executor:
        save_futures = []
        for inp_path, img in zip(batch_paths, img_stack):
            save_futures.append(
                save_executor.submit(utils.save_img, inp_path, img, out_dir),
            )

        for sf in tqdm.tqdm(
            concurrent.futures.as_completed(save_futures),
            total=len(save_futures),
            desc="Saving batch",
        ):
            sf.result()
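
For reference, the same correction can be driven from Python instead of the container entrypoint. This is a minimal sketch, assuming the package is installed and the data layout from run.sh above; the argument values are illustrative (patterns copied verbatim from run.sh), while the parameter names follow the apply signature shown in this module.

import pathlib

from polus.plugins.transforms.images.apply_flatfield import apply

# Illustrative paths and patterns mirroring run.sh; adjust to your data layout.
apply(
    img_dir=pathlib.Path("/data/images"),
    img_pattern="p{p:d+}_x{x:d+}_y{y:d+}_wx{r:d+}_wy{z:d+}_c{c:d+}.ome.tif",
    ff_dir=pathlib.Path("/data/estimation"),
    ff_pattern=r"p{p:d+}_x\(01-24\)_y\(01-16\)_wx\(1-3\)_wy\(1-3\)_c{c:d+}_flatfield.ome.tif",
    df_pattern=r"p{p:d+}_x\(01-24\)_y\(01-16\)_wx\(1-3\)_wy\(1-3\)_c{c:d+}_darkfield.ome.tif",
    out_dir=pathlib.Path("/data/outputs"),
)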