Skip to content

Commit

Permalink
maint: rename siibraconf -> perfconf
Browse files Browse the repository at this point in the history
maint: low ram/non local as perf default
feat: append all dataop for volumes
feat: query cursor data table & reconfigure
  • Loading branch information
xgui3783 committed Oct 23, 2024
1 parent 8094630 commit 198ac6f
Show file tree
Hide file tree
Showing 21 changed files with 276 additions and 306 deletions.
16 changes: 12 additions & 4 deletions e2e/atlases/test_region.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import numpy as np

import siibra
from siibra.attributes import datarecipes
from siibra.assignment.qualification import Qualification

regions = [
Expand Down Expand Up @@ -135,24 +136,31 @@ def jba29_fpf_reg_map():
yield region.extract_map("icbm 152"), [0, 211, 212]


# TODO (ASAP) higher hierarchy map not working
jba29_regmap_fx_name = [
"jba29_fp1lh_reg_map",
"jba29_fp1bh_reg_map",
"jba29_fpf_reg_map",
# "jba29_fp1bh_reg_map",
# "jba29_fpf_reg_map",
]


@pytest.mark.parametrize("fx_name", jba29_regmap_fx_name)
def test_regional_map_fetch_ok(fx_name, request):
nii, val = request.getfixturevalue(fx_name)
dr, val = request.getfixturevalue(fx_name)
assert isinstance(dr, datarecipes.DataRecipe)
nii = dr.get_data()
assert isinstance(
nii, nib.Nifti1Image
), f"Expected fetched is nifti image, but is not {type(nii)}"


# TODO (ASAP) unsure what value masked nifti should return
@pytest.mark.parametrize("fx_name", jba29_regmap_fx_name)
def test_regional_map_returns_mask(fx_name, request):
nii = request.getfixturevalue(fx_name)
pytest.skip("value of mask is incorrect")
dr, val = request.getfixturevalue(fx_name)
assert isinstance(dr, datarecipes.DataRecipe)
nii = dr.get_data()
if isinstance(nii, nib.Nifti1Image):
assert np.unique(nii.dataobj).tolist() == [
0,
Expand Down
41 changes: 11 additions & 30 deletions e2e/atttributes/dataproviders/volume/test_volumeprovider.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,6 @@
from siibra.attributes.locations import BoundingBox, Point
from siibra.attributes.datarecipes.volume import ImageRecipe
from siibra.operations.volume_fetcher.nifti import NiftiExtractVOI
from siibra.operations.volume_fetcher.neuroglancer_precomputed import (
NgPrecomputedFetchCfg,
)


JBA_31_ICBM152_LABELLED_URL = "https://data-proxy.ebrains.eu/api/v1/public/buckets/d-f1fe19e8-99bd-44bc-9616-a52850680777/maximum-probability-maps_MPMs_207-areas/JulichBrainAtlas_3.1_207areas_MPM_lh_MNI152.nii.gz"
Expand Down Expand Up @@ -108,8 +105,8 @@ def test_get_data_statledmap(jba_hoc1_lh_icbm152_stat_imgprov):


def test_get_data_labelledmap_voi(jba31_icbm152_labelled_improv, hoc1_lh_bbox):
jba31_icbm152_labelled_improv.transformation_ops.append(
NiftiExtractVOI.generate_specs(voi=hoc1_lh_bbox)
jba31_icbm152_labelled_improv = jba31_icbm152_labelled_improv.reconfigure(
voi=hoc1_lh_bbox
)
data = jba31_icbm152_labelled_improv.get_data()
assert isinstance(data, nib.Nifti1Image)
Expand All @@ -123,40 +120,24 @@ def test_get_data_labelledmap_voi(jba31_icbm152_labelled_improv, hoc1_lh_bbox):


def test_get_data_statmap_voi(jba_hoc1_lh_icbm152_stat_imgprov, fp1_lh_bbox):
jba_hoc1_lh_icbm152_stat_imgprov.transformation_ops.append(
NiftiExtractVOI.generate_specs(voi=fp1_lh_bbox)
jba_hoc1_lh_icbm152_stat_imgprov = jba_hoc1_lh_icbm152_stat_imgprov.reconfigure(
voi=fp1_lh_bbox
)
data = jba_hoc1_lh_icbm152_stat_imgprov.get_data()
assert isinstance(data, nib.Nifti1Image)
assert np.max(data.dataobj) < 0.01
assert data.dataobj.shape == (48, 29, 64)


def test_ng_nifti_extractvoi(bb_template, bb_test_bbox, tmp_nii_filename):
bb_template.transformation_ops.extend(
[
NiftiExtractVOI.generate_specs(voi=bb_test_bbox),
NgPrecomputedFetchCfg.generate_specs(
fetch_config={"max_download_GB": 1, "resolution_mm": 8e-2}
),
]
)
nii = bb_template.get_data()
nii.to_filename(tmp_nii_filename)
with open(tmp_nii_filename, "rb") as fp:
assert hashlib.md5(fp.read()).hexdigest() == "257d7d1549f9dbff5622f9e4147f996a"
def test_ng_get_parameters(bb_template, bb_test_bbox):
bb_template_params = bb_template.get_parameters()
param_names = bb_template_params["param_name"].tolist()
assert "bbox" in param_names
assert "resolution_mm" in param_names


def test_ng_nifti_fetch_kwargs(bb_template, bb_test_bbox, tmp_nii_filename):
bb_template.transformation_ops.append(
NgPrecomputedFetchCfg.generate_specs(
fetch_config={
"max_download_GB": 1,
"resolution_mm": 8e-2,
"bbox": bb_test_bbox,
}
),
)
def test_ng_nifti_extractvoi(bb_template, bb_test_bbox, tmp_nii_filename):
bb_template = bb_template.reconfigure(bbox=bb_test_bbox, resolution_mm=8e-2)
nii = bb_template.get_data()
nii.to_filename(tmp_nii_filename)
with open(tmp_nii_filename, "rb") as fp:
Expand Down
72 changes: 48 additions & 24 deletions e2e/features/test_receptor_density.py
Original file line number Diff line number Diff line change
@@ -1,60 +1,84 @@
import pytest
import siibra


@pytest.fixture(scope="session")
def all_receptor_density_features():
def receptor_density_search_cursor():
modality = siibra.modality_vocab.modality["receptor"]
query = siibra.QueryParam(attributes=[modality])
yield siibra.find([query], siibra.Feature)
yield siibra.SearchResult(criteria=[query], search_type=siibra.Feature)


@pytest.fixture(scope="session")
def filtered_for_fp(all_receptor_density_features):
yield siibra.Feature.filter_facets(all_receptor_density_features, {
"Data Type": "fingerprint"
})

def all_receptor_density_features(receptor_density_search_cursor):
yield receptor_density_search_cursor.find()


@pytest.fixture(scope="session")
def filtered_for_pr(all_receptor_density_features):
yield siibra.Feature.filter_facets(all_receptor_density_features, {
"Data Type": "cortical profile"
})
def filtered_for_fp(receptor_density_search_cursor):
new_cursor = receptor_density_search_cursor.reconfigure(
spec={"category_Data Type": "fingerprint"}
)
yield new_cursor.find()

def has_fp(feat: siibra.Feature):
    """Return True if the feature's facets table contains a 'Data Type' == 'fingerprint' row."""
    matching_rows = feat.facets.query("key == 'Data Type' & value == 'fingerprint'")
    return len(matching_rows) > 0

def has_profile(feat: siibra.Feature):
    """Return True if the feature's facets table contains a 'Data Type' == 'cortical profile' row."""
    matching_rows = feat.facets.query("key == 'Data Type' & value == 'cortical profile'")
    return len(matching_rows) > 0
@pytest.fixture(scope="session")
def filtered_for_pr(receptor_density_search_cursor):
new_cursor = receptor_density_search_cursor.reconfigure(
spec={"category_Data Type": "cortical profile"}
)
yield new_cursor.find()


# at least 1 feature
def test_all_receptor_features(all_receptor_density_features):
assert len(all_receptor_density_features) > 0


# no duplicated ID
def test_no_duplicated_id(all_receptor_density_features):
id_set = {feat._get(siibra.attributes.descriptions.ID).value for feat in all_receptor_density_features}
id_set = {
feat._get(siibra.attributes.descriptions.ID).value
for feat in all_receptor_density_features
}
assert len(id_set) == len(all_receptor_density_features)


def is_fp(feat: siibra.Feature):
    """Return True if *feat* has a Categorization with key 'Data Type' and value 'fingerprint'."""
    return any(
        cat.key == "Data Type" and cat.value == "fingerprint"
        for cat in feat._find(siibra.attributes.descriptions.Categorization)
    )


def is_pr(feat: siibra.Feature):
    """Return True if *feat* has a Categorization with key 'Data Type' and value 'cortical profile'."""
    return any(
        cat.key == "Data Type" and cat.value == "cortical profile"
        for cat in feat._find(siibra.attributes.descriptions.Categorization)
    )


# fingerprint
def test_some_has_fp(all_receptor_density_features):
assert any(has_fp(feat) for feat in all_receptor_density_features)
assert any(is_fp(feat) for feat in all_receptor_density_features)


def test_some_has_no_fp(all_receptor_density_features):
assert any(not has_fp(feat) for feat in all_receptor_density_features)
assert any(not is_fp(feat) for feat in all_receptor_density_features)


def test_after_filter_all_has_fp(filtered_for_fp):
assert all(has_fp(feat) for feat in filtered_for_fp)
assert all(is_fp(feat) for feat in filtered_for_fp)


# cortical profiles
def test_some_has_pr(all_receptor_density_features):
assert any(has_profile(feat) for feat in all_receptor_density_features)
assert any(is_pr(feat) for feat in all_receptor_density_features)


def test_some_has_no_pr(all_receptor_density_features):
assert any(not has_profile(feat) for feat in all_receptor_density_features)
assert any(not is_pr(feat) for feat in all_receptor_density_features)


def test_after_filter_all_has_pr(filtered_for_pr):
assert all(has_profile(feat) for feat in filtered_for_pr)
assert all(is_pr(feat) for feat in filtered_for_pr)
9 changes: 7 additions & 2 deletions siibra/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,13 @@ def get_search_cursor(criteria: List[AttributeCollection], find_type: Type[T]):
return SearchResult(criteria=criteria, search_type=find_type)


def find(criteria: List[AttributeCollection], find_type: Type[T]):
res = get_search_cursor(criteria, find_type)
def find(criteria: List[AttributeCollection], Klass: Type[T]):
    """
    Return all instances of type *Klass* for which every criterion in *criteria* matches.

    NOTE(review): the original docstring was cut off mid-sentence ("A criterion is
    considered matching if **any** of its"); presumably a criterion matches when any
    of its attributes matches — confirm against SearchResult's matching semantics.

    Delegates to get_search_cursor(...) and calls .find() on the resulting cursor.
    """
    res = get_search_cursor(criteria, Klass)
    return res.find()


Expand Down
35 changes: 30 additions & 5 deletions siibra/assignment/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,17 +45,19 @@ class SearchResult(Generic[T]):
opt for the criteria that is most likely to return false first (e.g. for feature, Modality)
"""

def __init__(self, criteria=None, search_type: Type[T] = None):
def __init__(self, criteria=None, search_type: Type[T] = None, filter_kwargs=None):
    """
    Build a search cursor.

    Parameters
    ----------
    criteria: list of AttributeCollection used to match candidate instances
        (defaults to an empty list).
    search_type: the type of instance to search for. Required; a RuntimeError
        is raised when omitted.
    filter_kwargs: mapping of summary-table column -> value used to narrow the
        results (defaults to an empty dict); see get_summary_table/reconfigure.
    """
    if search_type is None:
        # fail fast: every downstream lookup needs a concrete target type
        raise RuntimeError("search_type must be defined!")
    self.filter_kwargs = filter_kwargs or {}
    self.search_type = search_type
    self.criteria = criteria or []

criteria: List[AttributeCollection] = field(default_factory=list)
search_type: Type[T] = None

def find(self) -> List[T]:
return SearchResult.cached_find(self.criteria, self.search_type)
summary_table = self.get_summary_table()
return summary_table["instance"].tolist()

@staticmethod
def _find_iter(criteria: List[AttributeCollection], search_type: Type[T]):
Expand Down Expand Up @@ -95,7 +97,7 @@ def build_summary_table(items: List[T]):
# In case key is one of ID, name etc, prepend to avoid name collision
"categorizations": item.categorizations,
**{
f"category_{categorization.key}": categorization.value
f"category_{categorization.key}": str(categorization.value)
for categorization in item._find(Categorization)
},
"ID": item.ID,
Expand All @@ -118,10 +120,33 @@ def pick_instance(items: List[T], expr=None, index=None) -> T:
raise NotImplementedError

def get_summary_table(self):
return self.build_summary_table(self.find())
table = self.build_summary_table(
SearchResult.cached_find(self.criteria, self.search_type)
)
if len(self.filter_kwargs) == 0:
return table

search_str = " & ".join(
[f"`{key}` == '{value}'" for key, value in self.filter_kwargs.items()]
)
return table.query(search_str)

def get_instance(self, expr=None, index=None):
"""
Allow user to apply what was learnt from get_summary_table and get a subset of the search.
"""
return self.pick_instance(self.find(), expr=expr, index=index)
return self.pick_instance(
SearchResult.cached_find(self.criteria, self.search_type),
expr=expr,
index=index,
)

def reconfigure(self, spec=None, **kwargs):
    """Return a new SearchResult with identical criteria and search type, but with
    filter kwargs built from *spec* (if given) overridden by any keyword arguments."""
    merged_filters = dict(spec or {})
    merged_filters.update(kwargs)
    return SearchResult(
        criteria=self.criteria,
        search_type=self.search_type,
        filter_kwargs=merged_filters,
    )
22 changes: 9 additions & 13 deletions siibra/atlases/parcellationmap.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
import numpy as np
from pandas import DataFrame

from ..concepts import AtlasElement
from ..concepts import atlas_elements, query_parameter
from ..commons.iterable import assert_ooo
from ..commons.string import convert_hexcolor_to_rgbtuple
from ..commons.logger import logger, siibra_tqdm, QUIET
Expand Down Expand Up @@ -57,7 +57,7 @@


@dataclass(repr=False, eq=False)
class Map(AtlasElement):
class Map(atlas_elements.AtlasElement):
schema: str = "siibra/atlases/parcellationmap/v0.1"
parcellation_id: str = None
space_id: str = None
Expand All @@ -75,23 +75,19 @@ def __post_init__(self):

@property
def parcellation(self) -> "ParcellationScheme":
from ..factory import iter_preconfigured
from .. import find

return assert_ooo(
[
parc
for parc in iter_preconfigured(ParcellationScheme)
if parc.ID == self.parcellation_id
]
query_param = query_parameter.QueryParam(
attributes=[_ID(value=self.parcellation_id)]
)
return assert_ooo(find([query_param], ParcellationScheme))

@property
def space(self) -> "Space":
from ..factory import iter_preconfigured
from .. import find

return assert_ooo(
[sp for sp in iter_preconfigured(Space) if sp.ID == self.space_id]
)
query_param = query_parameter.QueryParam(attributes=[_ID(value=self.space_id)])
return assert_ooo(find([query_param], Space))

@property
def regionnames(self) -> List[str]:
Expand Down
8 changes: 4 additions & 4 deletions siibra/atlases/sparsemap.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@
from ..operations.file_fetcher.io.base import PartialReader
from ..operations.file_fetcher.io import MemoryPartialReader
from ..commons.logger import siibra_tqdm, logger
from ..commons.conf import SiibraConf
from ..commons.conf import PerfConf
from ..cache import CACHE
from ..operations.image_assignment import (
ScoredImageAssignment,
Expand Down Expand Up @@ -494,8 +494,8 @@ def lookup_points(
**fetch_kwargs: VolumeOpsKwargs,
) -> DataFrame:
spind = self._get_readable_sparseindex(
warmup=SiibraConf.KEEP_LOCAL_CACHE > 0,
inmemory=SiibraConf.MEMORY_HUNGRY > 0,
warmup=PerfConf.KEEP_LOCAL_CACHE > 0,
inmemory=PerfConf.MEMORY_HUNGRY > 0,
)
if spind is None:
return super().lookup_points(points, **fetch_kwargs)
Expand Down Expand Up @@ -558,7 +558,7 @@ def assign(
return self.lookup_points(queryitem, **volume_ops_kwargs)

spind = self._get_readable_sparseindex(
warmup=SiibraConf.KEEP_LOCAL_CACHE > 0, inmemory=SiibraConf.MEMORY_HUNGRY > 0
warmup=PerfConf.KEEP_LOCAL_CACHE > 0, inmemory=PerfConf.MEMORY_HUNGRY > 0
)
queryitemloc = (
bbox_from_imageprovider(queryitem)
Expand Down
Loading

0 comments on commit 198ac6f

Please sign in to comment.