Skip to content
2 changes: 1 addition & 1 deletion TPTBox/core/bids_constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@
"MP2RAG",
"MPM",
"MT",
"MT",
"MTS",
"T1map",
"T2map",
"T2starmap",
Expand Down
35 changes: 26 additions & 9 deletions TPTBox/core/bids_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -1104,6 +1104,22 @@ def get_frame_of_reference_uid(self, default=None):
base36 = chars[i] + base36
return base36[:length]

def get_identifier(self, sequence_splitting_keys: list[str]) -> str:
    """Build a subject-based identifier string for this BIDS_FILE.

    The identifier starts with the subject key ("sub-<id>", or "sub-404"
    with a console warning when no subject key exists), followed by every
    key of ``self.info`` that appears in ``sequence_splitting_keys``, in
    the order the keys occur in ``self.info``.

    Args:
        sequence_splitting_keys (list[str]): keys used for splitting sequences.

    Returns:
        str: identifier such as "sub-01_ses-1".
    """
    if "sub" in self.info:
        parts = ["sub-" + self.info["sub"]]
    else:
        print(f"family_id, no sub-key, got {self.info}")
        parts = ["sub-404"]
    # Preserve the insertion order of self.info when appending splitting keys.
    parts.extend(key + "-" + self.info[key] for key in self.info if key in sequence_splitting_keys)
    return "_".join(parts)


class Searchquery:
def __init__(self, subj: Subject_Container, flatten=False) -> None:
Expand Down Expand Up @@ -1464,15 +1480,16 @@ def __lt__(self, other):

def get_identifier(self):
    """Return the identifier of this family, derived from its first file.

    Delegates to ``BIDS_FILE.get_identifier`` with this family's
    ``sequence_splitting_keys`` so both code paths share one implementation.
    (The previously duplicated, commented-out legacy implementation was
    unreachable and has been removed.)

    Returns:
        str: identifier such as "sub-01_ses-1".
    """
    # Any file of the family yields the same identifier, so the first entry
    # of the first data_dict bucket is representative.
    first_entry = self.data_dict[next(iter(self.data_dict))][0]
    return first_entry.get_identifier(self.sequence_splitting_keys)
Comment on lines +1484 to +1492
Copy link

Copilot AI Mar 3, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There is a large block of commented-out legacy implementation left after the new early return. Since it is now unreachable and duplicates the extracted logic, it should be removed to avoid confusion and reduce maintenance overhead.

Suggested change
# if "sub" not in first_e.info:
# print(f"family_id, no sub-key, got {first_e.info} and data_dict {list(self.data_dict.keys())}")
# identifier = "sub-404"
# else:
# identifier = "sub-" + first_e.info["sub"]
# for s in first_e.info.keys():
# if s in self.sequence_splitting_keys:
# identifier += "_" + s + "-" + first_e.info[s]
# return identifier

Copilot uses AI. Check for mistakes.

def items(self):
return self.data_dict.items()
Expand Down
5 changes: 3 additions & 2 deletions TPTBox/core/nii_poi_abstract.py
Original file line number Diff line number Diff line change
Expand Up @@ -305,11 +305,12 @@ def assert_affine(

# Print errors
for err in found_errors:
log.print(err, ltype=Log_Type.FAIL, verbose=verbose)
text = f"{text}; {err}" if text else f"{err}"
log.print(f"{text}", ltype=Log_Type.FAIL, verbose=verbose)
Comment on lines 307 to +309
Copy link

Copilot AI Mar 3, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

text is being mutated inside the loop and then printed each iteration, which causes later log lines to include earlier errors (e.g., second line becomes "err1; err2"). This produces duplicated/expanding messages and makes the per-error output hard to read. Consider either printing each err with the original text prefix (without mutating it), or build a single aggregated message once after the loop and print/raise that.

Copilot uses AI. Check for mistakes.
# Final conclusion and possible raising of AssertionError
has_errors = len(found_errors) > 0
if raise_error and has_errors:
raise AssertionError(f"{text}; assert_affine failed with {found_errors}")
raise AssertionError(f"{text} assert_affine failed with {found_errors}")

return not has_errors

Expand Down
13 changes: 9 additions & 4 deletions TPTBox/core/nii_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -408,6 +408,9 @@ def dtype(self)->type:
if self.__unpacked:
return self._arr.dtype # type: ignore
return self.nii.dataobj.dtype #type: ignore
@dtype.setter
def dtype(self, dtype:type):
self.set_dtype_(dtype)
@property
def header(self) -> Nifti1Header:
if self.__unpacked:
Expand All @@ -428,6 +431,9 @@ def affine(self,affine:np.ndarray):
def orientation(self) -> AX_CODES:
ort = nio.io_orientation(self.affine)
return nio.ornt2axcodes(ort) # type: ignore
@orientation.setter
def orientation(self, value: AX_CODES):
self.reorient_(value, verbose=False)
@property
def dims(self)->int:
self._unpack()
Expand All @@ -443,6 +449,9 @@ def zoom(self) -> ZOOMS:
z = z[:n]
#assert len(z) == 3,z
return z # type: ignore
@zoom.setter
def zoom(self, value: tuple[float, float, float]):
self.rescale_(value, verbose=False)

@property
def origin(self) -> tuple[float, float, float]:
Expand Down Expand Up @@ -477,10 +486,6 @@ def direction_itk(self) -> list:
a[:len(a)//3*2]*=-1
return a.tolist()

@orientation.setter
def orientation(self, value: AX_CODES):
self.reorient_(value, verbose=False)


def split_4D_image_to_3D(self):
assert self.get_num_dims() == 4,self.get_num_dims()
Expand Down
2 changes: 1 addition & 1 deletion TPTBox/core/np_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -471,7 +471,7 @@ def np_calc_crop_around_centerpoint(
arr: np.ndarray,
cutout_size: tuple[int, ...],
pad_to_size: Sequence[int] | np.ndarray | int = 0,
) -> tuple[np.ndarray, tuple, tuple]:
) -> tuple[np.ndarray, tuple[slice, slice, slice], tuple]:
Copy link

Copilot AI Mar 3, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The return type annotation is now fixed to exactly three slices, but this function is n-dimensional (it builds cutout_coords_slices with length n_dim). This annotation will be incorrect for non-3D inputs and will break static typing for 2D/4D use. Consider changing the type to tuple[slice, ...] (or tuple[slice, ...] parameterized by n_dim) to match the implementation.

Copilot uses AI. Check for mistakes.
"""

Args:
Expand Down
5 changes: 3 additions & 2 deletions TPTBox/core/poi.py
Original file line number Diff line number Diff line change
Expand Up @@ -918,10 +918,10 @@ def calc_poi_from_subreg_vert(
save_buffer_file=False, # used by wrapper # noqa: ARG001
decimals=2,
subreg_id: int | Abstract_lvl | Sequence[int | Abstract_lvl] | Sequence[Abstract_lvl] | Sequence[int] = 50,
verbose: logging = True,
verbose: logging = False,
extend_to: POI | None = None,
# use_vertebra_special_action=True,
_vert_ids=None,
_vert_ids: list[int] | None = None,
_print_phases=False,
_orientation_version=0,
) -> POI:
Expand Down Expand Up @@ -1058,6 +1058,7 @@ def calc_poi_from_subreg_vert(
subreg_msk,
_vert_ids=_vert_ids,
log=log,
verbose=verbose,
_orientation_version=_orientation_version,
)
extend_to.apply_crop_reverse(crop, org_shape, inplace=True)
Expand Down
2 changes: 1 addition & 1 deletion TPTBox/core/poi_fun/poi_abstract.py
Original file line number Diff line number Diff line change
Expand Up @@ -441,7 +441,7 @@ def map_labels_(

def sort(self, inplace=True, order_dict: dict | None = None) -> Self:
"""Sort vertebra dictionary by sorting_list"""
if self.level_one_info is not None:
if self.level_one_info is not None and self.level_one_info != Any:
order_dict = self.level_one_info.order_dict()
poi = self.centroids._sort(inplace=inplace, order_dict=order_dict)
if inplace:
Expand Down
63 changes: 61 additions & 2 deletions TPTBox/core/poi_fun/ray_casting.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ def unit_vector(vector):

# @njit(fastmath=True)
def trilinear_interpolate(volume, x, y, z):
xi, yi, zi = int(x), int(y), int(z)
xi, yi, zi = np.floor(x).astype(int), np.floor(y).astype(int), np.floor(z).astype(int)
if xi < 0 or yi < 0 or zi < 0 or xi >= volume.shape[0] - 1 or yi >= volume.shape[1] - 1 or zi >= volume.shape[2] - 1:
return 0.0

Expand Down Expand Up @@ -51,7 +51,7 @@ def max_distance_ray_cast_convex_npfast(
region_array: np.ndarray,
start_coord: np.ndarray,
direction_vector: np.ndarray,
acc_delta=0.05,
acc_delta=0.00005,
):
# Normalize direction
norm_vec = direction_vector / np.sqrt((direction_vector**2).sum())
Expand All @@ -70,6 +70,7 @@ def max_distance_ray_cast_convex_npfast(
y = start_coord[1] + norm_vec[1] * mid
z = start_coord[2] + norm_vec[2] * mid
val = trilinear_interpolate(region_array, x, y, z)
print(f"Raycast check at distance {mid:.2f}: value={val:.4f}")
if val > 0.5:
min_v = mid
else:
Expand All @@ -86,6 +87,64 @@ def max_distance_ray_cast_convex_npfast(
)


def max_distance_ray_cast_convex_np(
region: np.ndarray,
start_coord: COORDINATE | np.ndarray,
direction_vector: np.ndarray,
acc_delta: float = 0.00005,
max_v: int | None = None,
):
"""
Computes the maximum distance a ray can travel inside a convex region before exiting.

Parameters:
region (NII): The region of interest as a 3D NIfTI image.
start_coord (COORDINATE | np.ndarray): The starting coordinate of the ray.
direction_vector (np.ndarray): The direction vector of the ray.
acc_delta (float, optional): The accuracy threshold for bisection search. Default is 0.00005.

Returns:
np.ndarray: The exit coordinate of the ray within the region.
"""
start_point_np = np.asarray(start_coord)
if start_point_np is None:
return None

"""Convex assumption!"""
# Compute a normal vector, that defines the plane direction
normal_vector = np.asarray(direction_vector)
normal_vector = normal_vector / norm(normal_vector)
# Create a function to interpolate within the mask array
interpolator = RegularGridInterpolator([np.arange(region.shape[i]) for i in range(3)], region)

def is_inside(distance):
coords = [start_point_np[i] + normal_vector[i] * distance for i in [0, 1, 2]]
if any(i < 0 for i in coords):
return 0
if any(coords[i] > region.shape[i] - 1 for i in range(len(coords))):
return 0
# Evaluate the mask value at the interpolated coordinates
mask_value = interpolator(coords)
return mask_value > 0.5

if not is_inside(0):
return start_point_np
count = 0
min_v = 0
if max_v is None:
max_v = sum(region.shape)
delta = max_v * 2
while acc_delta < delta:
bisection = (max_v - min_v) / 2 + min_v
if is_inside(bisection):
min_v = bisection
else:
max_v = bisection
delta = max_v - min_v
count += 1
return start_point_np + normal_vector * ((min_v + max_v) / 2)


def max_distance_ray_cast_convex(
region: NII,
start_coord: COORDINATE | np.ndarray,
Expand Down
6 changes: 4 additions & 2 deletions TPTBox/core/poi_fun/vertebra_pois_non_centroids.py
Original file line number Diff line number Diff line change
Expand Up @@ -236,6 +236,7 @@ def __init__(self, target: Location, *prerequisite: Location, **args) -> None:
Strategy_Computed_Before(L.Spinal_Canal,L.Vertebra_Corpus)
Strategy_Computed_Before(L.Vertebra_Disc_Inferior,L.Vertebra_Disc_Inferior)


# fmt: on
def compute_non_centroid_pois( # noqa: C901
poi: POI,
Expand All @@ -244,6 +245,7 @@ def compute_non_centroid_pois( # noqa: C901
subreg: NII,
_vert_ids: Sequence[int] | None = None,
log: Logger_Interface = _log,
verbose: bool | None = True,
_orientation_version=0,
):
if _vert_ids is None:
Expand All @@ -254,7 +256,7 @@ def compute_non_centroid_pois( # noqa: C901
assert 52 not in poi.keys_region()

if Location.Vertebra_Direction_Inferior in locations:
log.on_text("Compute Vertebra DIRECTIONS")
log.on_text("Compute Vertebra DIRECTIONS", verbose=verbose)
### Calc vertebra direction; We always need them, so we just compute them. ###
sub_regions = poi.keys_subregion()
if any(a.value not in sub_regions for a in vert_directions):
Expand All @@ -268,7 +270,7 @@ def compute_non_centroid_pois( # noqa: C901
set(locations),
key=lambda x: all_poi_functions[x.value].prority() if x.value in all_poi_functions else x.value,
) # type: ignore
log.on_text("Calc pois from subregion id", {l.name for l in locations})
log.on_text("Calc pois from subregion id", {l.name for l in locations}, verbose=verbose)
### DENSE ###
if Location.Dens_axis in locations and 2 in _vert_ids and (2, Location.Dens_axis.value) not in poi:
a = subreg * vert.extract_label(2)
Expand Down
3 changes: 2 additions & 1 deletion TPTBox/tests/speedtests/speedtest_npunique.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
np_bbox_binary,
np_bounding_boxes,
np_center_of_mass,
np_is_empty,
np_map_labels,
np_unique,
np_unique_withoutzero,
Expand All @@ -33,7 +34,7 @@ def get_nii_array():
speed_test(
repeats=50,
get_input_func=get_nii_array,
functions=[np_unique, np.unique],
functions=[np_unique, np.unique, np_is_empty, np.max],
assert_equal_function=lambda x, y: True, # np.all([x[i] == y[i] for i in range(len(x))]), # noqa: ARG005
# np.all([x[i] == y[i] for i in range(len(x))])
)
Expand Down