- bugfix: now passing target_layer to get_receptive_boxes so that correct values are computed when using layer4

- target_layer and use_l2_normalization now logged in training
- added fire to experiment.py

Todos: matching parameters (thresholds, trials, etc.) need estimation
process output information
parent 0bb782ce
......@@ -170,7 +170,8 @@ class Delf(torch.nn.Module):
# we will not be using the features in cuda anymore so detach to save memory
features = features.detach().cpu()
# calculate the receptive boxes in original scale
receptive_boxes = get_receptive_boxes(features.size(2), features.size(3), scale)
receptive_boxes = get_receptive_boxes(features.size(2), features.size(3), scale,
target_layer=self.target_layer)
# flatten h,w dimensions so we can append values from different scales
# spatial information can be inferred from the receptive boxes
features = features.view(features.size(1), -1).t()
......
......@@ -8,6 +8,7 @@ import uuid
import time
import json
import pickle
import fire
CENTER_CROP_SIZE_FINETUNING = 250
RANDOM_CROP_SITE_FINETUNING = 224
CENTER_CROP_SIZE_KEYPOINT = 900
......@@ -24,6 +25,8 @@ class ExperimentManager:
def __init__(self, experiment_name, stages, dataset, validation_split=0.2, batch_size=8, num_workers=4, epochs=30,
learning_rate=0.008, learning_rate_gamma=0.5, learning_rate_step_size=10, weight_decay=0.0001,
load_from=None, target_layer="layer3", use_l2_normalization=True, pca_dataset=None, pca_load=None):
print(f"Current working directory is {Path.cwd()}")
print(f"Running on CUDA:{torch.cuda.is_available()}")
print("Preparing experiment:")
# check if desired stages are in valid format
assert isinstance(stages, set), f"stages must be provided as set! E.g. {'finetuning', 'keypoints'}"
......@@ -36,7 +39,7 @@ class ExperimentManager:
# set the cuda device
self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# create directory for the experiment
self.experiment_path = Path.joinpath(Path("../Experiments"), Path(experiment_name))
self.experiment_path = Path.joinpath(Path("Experiments"), Path(experiment_name))
if not self.experiment_path.is_dir():
self.experiment_path.mkdir(parents=True)
print(f"Results will be saved at {self.experiment_path}")
......@@ -326,6 +329,8 @@ class ExperimentManager:
"Learning_Rate_Step_Size:": self.learning_rate_step_size[stage],
"Weight_Decay": self.weight_decay[stage],
"Epochs": self.epochs[stage],
"Target_Layer": self.target_layer,
"L2_normalization": self.use_l2_normalization,
"Validation_Accuracy": val_accuracy_log,
"Validation_Loss": val_loss_log
}
......@@ -335,7 +340,8 @@ class ExperimentManager:
def perform_retrieval(self):
# load model in retrieval mode
model = Delf(None, "retrieval", self.load_paths["retrieval"]).to(self.device)
model = Delf(None, "retrieval", self.load_paths["retrieval"], target_layer=self.target_layer,
use_l2_normalization=self.use_l2_normalization).to(self.device)
model.eval()
# create dataset paths
index_dataset = self.dataset["retrieval"].joinpath(Path("index"))
......@@ -465,7 +471,10 @@ def check_experiment_wide_parameter(parameter, parameter_name, required_type, al
torch.backends.cudnn.benchmark = True
#exp = ExperimentManager("variable target layer", {"retrieval"}, "../../Datasets/Oxford/index", load_from={"retrieval":"../Experiments/variable target layer/keypoints/5db43e8d_dbb65c50.pth"}).perform_retrieval("../Experiments/variable target layer/keypoints/5db43e8d_dbb65c50.pth", "../../Datasets/Oxford/index", "../../Datasets/Oxford/query", pca_load="pca.pkl")
#exp = ExperimentManager("all_stages", {"finetuning","keypoints","retrieval"}, {"finetuning": "../../Datasets/Landmarks", "keypoints": "../../Datasets/Landmarks", "retrieval": "../../Datasets/Oxford"}, epochs=5)
#exp = ExperimentManager("all_stages", {"keypoints","retrieval"}, {"finetuning": "../../Datasets/Landmarks", "keypoints":"../../Datasets/Landmarks", "retrieval": "../../Datasets/Oxford"}, epochs=1, load_from={"keypoints":"../Experiments/all_stages/keypoints/c4ad755b_9c629910.pth"})
exp = ExperimentManager("variable target layer", {"keypoints"}, "../../Datasets/Landmarks", epochs=1, load_from={"keypoints":"../Experiments/variable target layer/finetuning/5db43e8d.pth"})
#exp = ExperimentManager("30_epoch_run", {"finetuning","keypoints","retrieval"}, {"finetuning": "../Datasets/Landmarks", "keypoints": "../../Datasets/Landmarks", "retrieval": "../../Datasets/Oxford"}, epochs=30)
#exp = ExperimentManager("30_epoch_run", {"retrieval"}, {"retrieval": "../Datasets/Oxford"}, load_from={"retrieval":"Experiments/30_epoch_run/keypoints/e3f41a23_d0f3a26d.pth"})
#exp = ExperimentManager("layer4", {"retrieval"}, {"retrieval": "../Datasets/Oxford"}, epochs=1, target_layer='layer4', load_from={"retrieval": "Experiments/layer4/keypoints/90a267db_05ef12cd.pth"})
#exp = ExperimentManager("all_stages", {"keypoints","retrieval"}, {"keypoints":"../Datasets/Landmarks", "retrieval": "../Datasets/Oxford"}, epochs=1, load_from={"keypoints":"Experiments/all_stages/keypoints/c48957fe_6d39b7dc.pth"})
#exp = ExperimentManager("variable target layer", {"keypoints"}, "../Datasets/Landmarks", epochs=1, load_from={"keypoints":"Experiments/variable target layer/finetuning/5db43e8d.pth"})
if __name__ == '__main__':
fire.Fire(ExperimentManager)
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment