
Commit

Merge pull request #36 from PolarBean/development
Development

Former-commit-id: 79a73a0
PolarBean authored Apr 27, 2023
2 parents 7d2574a + 9247fac commit c9da6ca
Showing 5 changed files with 74 additions and 10 deletions.
4 changes: 4 additions & 0 deletions DeepSlice/coord_post_processing/angle_methods.py
@@ -85,6 +85,9 @@ def get_mean_angle(DV_list, ML_list, method, depths=None, species=None):
weighted_accuracy = plane_alignment.make_gaussian_weights(max)
else:
weighted_accuracy = [1.0] * len(df_center)
df_center = np.array(df_center)
df_center[df_center < min] = min
df_center[df_center > max] = max-1
weighted_accuracy = [weighted_accuracy[int(y)] for y in df_center]
print(weighted_accuracy)
DV_angle = np.average(DV_list, weights=weighted_accuracy)
@@ -111,6 +114,7 @@ def propagate_angles(df, method, species):
DV_angle, ML_angle = get_mean_angle(
DV_angle_list, ML_angle_list, method, depths, species
)
print(f"DV angle: {DV_angle}\nML angle: {ML_angle}")
# adjust the angles for each section in the dataset
df = set_angles(df, DV_angle, ML_angle)
return df
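For orientation, a minimal self-contained sketch (not part of the diff) of the clamping this hunk adds to get_mean_angle: per-section depths that fall outside the atlas range are pulled back into the valid index range of the Gaussian weight array before the weighted angle average is taken. The depth range and values below are made up for illustration.

import numpy as np

min_depth, max_depth = 0, 528                    # hypothetical atlas depth range
weights = np.ones(max_depth)                     # stands in for make_gaussian_weights(max_depth)

depths = np.array([-3.0, 12.0, 530.0, 260.0])    # per-section depths, some out of range
depths[depths < min_depth] = min_depth           # clamp low outliers to the first valid index
depths[depths > max_depth] = max_depth - 1       # clamp high outliers to the last valid index
weighted_accuracy = [weights[int(d)] for d in depths]

dv_angles = [4.1, 3.8, 5.0, 4.0]                 # example per-section DV cutting angles
print(np.average(dv_angles, weights=weighted_accuracy))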
(header for the next changed file was not captured in this view)
@@ -99,6 +99,8 @@ def rotation_around_axis(axis, angle):
def make_gaussian_weights(size):
x = np.linspace(-np.pi, np.pi, size)
weights = np.exp(-(x ** 2) / 2) / np.sqrt(2 * np.pi)
weights[weights>size-1] = size-1
weights[weights<0] = 0
return weights
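As a quick, runnable illustration (not part of the diff), make_gaussian_weights traces a standard-normal PDF over [-pi, pi], so sections near the centre of the range carry the most weight in the angle average; the size used below is an arbitrary example.

import numpy as np

def make_gaussian_weights(size):
    x = np.linspace(-np.pi, np.pi, size)
    weights = np.exp(-(x ** 2) / 2) / np.sqrt(2 * np.pi)
    weights[weights > size - 1] = size - 1   # bounds checks added in this commit
    weights[weights < 0] = 0
    return weights

w = make_gaussian_weights(528)      # arbitrary example size
print(w[0], w[264], w[-1])          # edge weights ~0.003, centre weight ~0.399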


36 changes: 30 additions & 6 deletions DeepSlice/main.py
@@ -24,10 +24,12 @@ def __init__(self, species):

def predict(
self,
image_directory: str,
image_directory: str = None,
ensemble: bool = None,
section_numbers: bool = True,
legacy_section_numbers=False,
image_list = None,
use_secondary_model = False,
):
"""predicts the atlas position for a folder full of histological brain sections
@@ -47,18 +49,40 @@
if ensemble == None:
ensemble = self.config["ensemble_status"][self.species]
ensemble = eval(ensemble)

image_generator, width, height = neural_network.load_images(image_directory)
if image_list:
image_generator, width, height = neural_network.load_images_from_list(
image_list
)
if image_directory:
print(
"WARNING: image_directory is set but image_list is also set. image_directory will be ignored."
)
else:
image_generator, width, height = neural_network.load_images_from_path(
image_directory
)
primary_weights = metadata_loader.get_data_path(self.config["weight_file_paths"][self.species]["primary"], self.metadata_path)

secondary_weights = metadata_loader.get_data_path(self.config["weight_file_paths"][self.species]["secondary"], self.metadata_path)

if secondary_weights == "None":
print(f"ensemble is not available for {self.species}")
if use_secondary_model:
print("WARNING: use_secondary_model is set but no secondary model is available. use_secondary_model will be ignored.")
use_secondary_model = False
ensemble = False
predictions = neural_network.predictions_util(
self.model, image_generator, primary_weights, secondary_weights, ensemble
)
if use_secondary_model and ensemble:
print("WARNING: use_secondary_model is set but ensemble is also set. use_secondary_model will be ignored.")
use_secondary_model = False
if use_secondary_model:
print("Using secondary model")
predictions = neural_network.predictions_util(
self.model, image_generator, secondary_weights,None, ensemble
)
else:
predictions = neural_network.predictions_util(
self.model, image_generator, primary_weights, secondary_weights, ensemble
)
predictions["width"] = width
predictions["height"] = height
if section_numbers:
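To show how the new predict() options fit together, a hedged usage sketch follows. The DSModel class name, the species string, and the paths are assumptions not taken from this diff; image_list and use_secondary_model are the arguments added here.

from DeepSlice import DSModel            # class name assumed, not shown in this diff

model = DSModel("mouse")

# As before: predict from a directory of section images.
model.predict(image_directory="brain_sections/")

# New: predict from an explicit list of image files.
# If image_directory is also given, it is ignored with a warning.
model.predict(image_list=["brain_sections/s001.png", "brain_sections/s002.jpg"])

# New: run only the secondary weights. Ignored (with a warning) when ensemble=True
# or when no secondary model exists for the species.
model.predict(image_directory="brain_sections/", use_secondary_model=True, ensemble=False)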
41 changes: 37 additions & 4 deletions DeepSlice/neural_network/neural_network.py
@@ -60,10 +60,10 @@ def initialise_network(xception_weights: str, weights: str, species: str) -> Seq



def load_images(image_path: str) -> np.ndarray:

def load_images_from_path(image_path: str) -> np.ndarray:
"""
Load the images from the given path
:param image_path: The path to the images
:type image_path: str
:return: an Image generator for the found images
@@ -94,13 +94,46 @@ def load_images(image_path: str) -> np.ndarray:
y_col=None,
target_size=(299, 299),
batch_size=1,

colormode="rgb",
shuffle=False,
class_mode=None,
)
return image_generator, width, height


def load_images_from_list(image_list: list) -> np.ndarray:
"""
Load the images from the given list
:param image_list: The list of images
:type image_list: list
:return: an Image generator for the found images
:rtype: keras.preprocessing.image.ImageDataGenerator
"""
valid_formats = [".jpg", ".jpeg", ".png"]
images = [i for i in image_list if os.path.splitext(i)[1].lower() in valid_formats]
sizes = [get_image_size(i) for i in images]
width = [i[0] for i in sizes]
height = [i[1] for i in sizes]
if len(images) == 0:
raise ValueError(
f"No images found in the directory, please ensure image files are one of the following formats: {', '.join(valid_formats)}"
)
image_df = pd.DataFrame({"Filenames": images})
with warnings.catch_warnings():
##throws warning about samplewise_std_normalization conflicting with samplewise_center which we don't use.
warnings.simplefilter("ignore")
image_generator = ImageDataGenerator(
preprocessing_function=gray_scale, samplewise_std_normalization=True
).flow_from_dataframe(
image_df,
x_col="Filenames",
y_col=None,
target_size=(299, 299),
batch_size=1,
colormode="rgb",
shuffle=False,
class_mode=None,
)
return image_generator, width, height

def predictions_util(
model: Sequential,
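A brief usage sketch of the two loaders (not part of the diff; the paths are made up). load_images_from_list filters its input by extension, so non-image entries such as the .txt file below are dropped before the generator is built.

from DeepSlice.neural_network import neural_network

# From a folder of sections (the renamed load_images_from_path):
image_generator, width, height = neural_network.load_images_from_path("brain_sections/")

# From an explicit list of files (the new load_images_from_list):
files = ["brain_sections/s001.png", "brain_sections/s002.jpg", "brain_sections/notes.txt"]
image_generator, width, height = neural_network.load_images_from_list(files)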
1 change: 1 addition & 0 deletions unfinished_files.json

Large diffs are not rendered by default.
