Commit 97c72bf (1 parent: bfa45b3)

Add functionality to evaluate top K assignments based on cost

7 files changed: +1782 −66 lines

ablations_over_360.sh (mode 100644 → 100755, +37 −7)
@@ -1,11 +1,38 @@
+#!/bin/bash --login
+#SBATCH -n 16
+#SBATCH -A research
+#SBATCH --mem-per-cpu=2048
+#SBATCH --time=72:00:00
+#SBATCH --mincpus=10
+#SBATCH --gres=gpu:1
+
+#SBATCH --mail-type=ALL
+#SBATCH -w gnode041
+
+# module add cuda/8.0
+# module add cudnn/7-cuda-8.0
+
 # Define arrays for possible values
 DOWNSAMPLE_VOXEL_SIZES=(0 0.01 0.001)
 GLOB_DIST_VALUES=(1 1.5 2.0)
 LOCAL_DIST_VALUES=(0.1 0.4 0.8 1.2)
-FPFH_VOXEL_VALUES=(0.02 0.05 0.1 0.2)
+FPFH_VOXEL_VALUES=(0.2)
 USE_MESH=0
 
+# conda init bash
+conda activate reid
+# /home2/$USER/Change_detection/download_weights.sh
+mkdir -p /scratch/$USER/360_runs/
+
+cd /home2/$USER/Change_detection
+
 # Iterate over values
+for i in 1 2 3 4; do
+    echo $i
+done;
+
+python runthrough_360.py
+
 for DOWNSAMPLE_VOXEL_SIZE in "${DOWNSAMPLE_VOXEL_SIZES[@]}"; do
     for GLOB_DIST in "${GLOB_DIST_VALUES[@]}"; do
         for LOCAL_DIST in "${LOCAL_DIST_VALUES[@]}"; do
@@ -25,17 +52,20 @@ for DOWNSAMPLE_VOXEL_SIZE in "${DOWNSAMPLE_VOXEL_SIZES[@]}"; do
                 python runthrough_360.py \
                     --sam-checkpoint-path=/scratch/$USER/sam_vit_h_4b8939.pth \
                     --ram-pretrained-path=/scratch/$USER/ram_swin_large_14m.pth \
-                    --test-folder-path=/home2/$USER/Documents/aneesh_thread/Change_detection/360_zip \
-                    --memory-save-path=/scratch/$USER/vin-experiments/360_runs/memory_${ABLATION_NAME}.pcd \
+                    --test-folder-path=/home2/$USER/Change_detection/360_zip \
+                    --memory-save-path=/scratch/$USER/360_runs/memory_${ABLATION_NAME}.pcd \
                     --down-sample-voxel-size=${DOWNSAMPLE_VOXEL_SIZE} \
                     --no-create-ext-mesh \
-                    --save-results-path=/scratch/$USER/vin-experiments/360_runs/results_${ABLATION_NAME}.json \
+                    --save-results-path=/scratch/$USER/360_runs/results_${ABLATION_NAME}.json \
                     --fpfh-global-dist-factor=${GLOB_DIST} \
                     --fpfh-local-dist-factor=${LOCAL_DIST} \
-                    --fpfh-voxel-size=${FPFH_VOXEL} \
-                    > /scratch/$USER/vin-experiments/360_runs/run_${ABLATION_NAME}.txt
+                    --fpfh-voxel-size=${FPFH_VOXEL}
+                # > /scratch/$USER/360_runs/run_${ABLATION_NAME}.txt
+
+                echo "Test done!"
+                exit 0
 
             done
         done
     done
-done
+done
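
For reference, the four bash loops above sweep the Cartesian product of the parameter arrays; with the new echo "Test done!" / exit 0 inside the innermost loop, the script now stops after the first combination. A minimal Python sketch of the same grid (purely illustrative, not part of this commit; the ablation naming scheme here is a hypothetical stand-in):

from itertools import product

# Same value arrays as the script above; FPFH voxel sweep narrowed to 0.2.
DOWNSAMPLE_VOXEL_SIZES = (0, 0.01, 0.001)
GLOB_DIST_VALUES = (1, 1.5, 2.0)
LOCAL_DIST_VALUES = (0.1, 0.4, 0.8, 1.2)
FPFH_VOXEL_VALUES = (0.2,)

for ds, gd, ld, fv in product(DOWNSAMPLE_VOXEL_SIZES, GLOB_DIST_VALUES,
                              LOCAL_DIST_VALUES, FPFH_VOXEL_VALUES):
    ablation_name = f"ds{ds}_gd{gd}_ld{ld}_fv{fv}"  # hypothetical naming
    print("python runthrough_360.py "
          f"--down-sample-voxel-size={ds} "
          f"--fpfh-global-dist-factor={gd} "
          f"--fpfh-local-dist-factor={ld} "
          f"--fpfh-voxel-size={fv}  # {ablation_name}")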

fpfh/fpfh_register.py (+1 −1)
@@ -52,4 +52,4 @@ def register_point_clouds(source, target, voxel_size, global_dist_factor = 1.5,
         o3d.pipelines.registration.TransformationEstimationPointToPoint()
     )
 
-    return result_icp.transformation
+    return result_icp.transformation, result_icp.fitness
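
register_point_clouds now returns Open3D's ICP fitness (inlier/overlap ratio, higher is better) alongside the transform, so every caller must unpack a pair. A minimal consumption sketch, assuming the module imports as fpfh.fpfh_register and using toy clouds only to show the call shape:

import numpy as np
import open3d as o3d
from fpfh.fpfh_register import register_point_clouds  # assumed import path

# Toy clouds purely to illustrate unpacking the new (transform, fitness)
# pair; real use passes the detected/memory pcds as in object_memory.py.
source = o3d.geometry.PointCloud()
source.points = o3d.utility.Vector3dVector(np.random.rand(500, 3))
target = o3d.geometry.PointCloud()
target.points = o3d.utility.Vector3dVector(np.random.rand(500, 3))

transform, fitness = register_point_clouds(source, target, voxel_size=0.05,
                                           global_dist_factor=1.5,
                                           local_dist_factor=0.4)
print(transform, fitness)  # fitness is what ranks candidate assignments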

jcbb.py (+26 −6)
@@ -3,6 +3,8 @@
 import torch
 from copy import copy
 
+import time
+
 class JCBB():
     def __init__(self, cosine_similarities, R_matrices):
         self.cosine_similarities = cosine_similarities
@@ -19,8 +21,12 @@ def __init__(self, cosine_similarities, R_matrices):
     # get all assignment paths
     def get_assignments(self):
         self.leaf_nodes = []
-        JCBBNode(self, self.num_detected, 0, [], [i for i in range(self.num_memory)])
+        JCBBNode(self, self.num_detected, 0, 0, [], [i for i in range(self.num_memory)])
+        self.sort_assignments()
         return self.leaf_nodes
+
+    def sort_assignments(self):
+        self.leaf_nodes = sorted(self.leaf_nodes, key=lambda x: x[1], reverse=True)
 
 """
 Node in the JCBB tree
@@ -30,34 +36,48 @@ def get_assignments(self):
 assignment : j
 path : array of assignments before this (index is i, value is j)
 choices_left : list of indices left to assign
+
+calculate the product of similarities as we go down (add log costs)
 """
-def JCBBNode(tree, max_depth, depth, path, choices_left):
+def JCBBNode(tree, max_depth, similarity_sum, depth, path, choices_left):
     if depth == max_depth:
-        tree.leaf_nodes.append(path)
+        tree.leaf_nodes.append([path, similarity_sum/max_depth])
         return
 
     # recurse through
     for c in choices_left:
+        # add the cosine similarity of this assignment to the cost
+        new_sum = similarity_sum + np.log(tree.cosine_similarities[depth][c])
+
         child_path = copy(path)
-
+
         # let this child be assignment (depth+1, c), remove c from choices
         child_path.append(c)
         child_choices_left = [d for d in choices_left if d != c]
 
         JCBBNode(
             tree,
             max_depth,
+            new_sum,
             depth+1,
             child_path,
             child_choices_left
         )
 
 if __name__ == '__main__':
-    j = JCBB(np.zeros((3,4)), None)
+    sim = np.random.uniform(size=(4,20))
+
+    start = time.time()
+
+    j = JCBB(sim, None)
     paths = j.get_assignments()
     # for p in paths:
     #     for k, v in p.items():
     #         print(k,v, sep="|", end=" ")
     #     print()
 
-    print(paths)
+    print("Time taken: ", time.time()-start)
+
+    # print(sim)
+
+    # print(paths)
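
Each leaf's score is the sum of log cosine similarities along its branch divided by the path length, i.e. the log of the geometric mean of the matched similarities; summing logs rather than multiplying raw similarities avoids numerical underflow on long paths. A self-contained sketch of the same ranking (illustrative only; JCBB builds it via recursion rather than itertools):

import numpy as np
from itertools import permutations

sim = np.random.uniform(0.1, 1.0, size=(3, 6))  # detections x memory objects

scored = []
for assignment in permutations(range(sim.shape[1]), sim.shape[0]):
    # sum of logs == log of the product of per-assignment similarities
    score = sum(np.log(sim[i, j]) for i, j in enumerate(assignment)) / sim.shape[0]
    scored.append([list(assignment), score])

scored.sort(key=lambda x: x[1], reverse=True)  # mirrors sort_assignments()
print(scored[:5])  # top K candidates, the slice localise() consumes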

object_memory.py (+42 −44)
@@ -1039,7 +1039,7 @@ def consolidate_memory(self, bounding_box_threshold=0.3, occlusion_overlap_thre
     def localise(self, image_path, depth_image_path, testname="", save_point_clouds=False,
                  outlier_removal_config=None,
                  fpfh_global_dist_factor = 1.5, fpfh_local_dist_factor = 0.4,
-                 fpfh_voxel_size = 0.05):
+                 fpfh_voxel_size = 0.05, topK=5):
         """
         Given an image and a corresponding depth image in an unknown frame, consult the stored memory
         and output a pose in the world frame of the point clouds stored in memory.
@@ -1097,52 +1097,48 @@ def localise(self, image_path, depth_image_path, testname="", save_point_clouds=
         j = JCBB(cosine_similarities, R_matrices)
         assns = j.get_assignments()
 
-        # calculate paths for all assignments, pick the best one
-        best_assignment = assns[0]
-        min_cost = 1e11
+        # only consider the top K assignments
+        assns_to_consider = [assn[0] for assn in assns[:topK]]
 
-        for assn in assns:
-            cost = 0
+        print("Phrases: ", detected_phrases)
+        print(cosine_similarities)
+        print("Assignments being considered: ", assns_to_consider)
+
+        assn_data = [ [assn, None, None] for assn in assns_to_consider ]
 
-            ### COST FUNCTION ###
+        # go through all top K assignments, record ICP costs
+        for assn_num, assn in enumerate(assns_to_consider):
+            # use ALL object pointclouds together
+            all_detected_points = []
+            all_memory_points = []
             for i,j in enumerate(assn):
-                cost += (1 - cosine_similarities[i,j])
-            cost = np.log(cost) * 1./len(assn) # normalized product of cosine DIFFERENCES
-            # NOTE: dividing by length of assignments doesn't matter as all are same rn
+                all_detected_points.append(detected_pointclouds[i])
+                all_memory_points.append(self.memory[j].pcd)
+            all_detected_points = np.concatenate(all_detected_points, axis=-1)
+            all_memory_points = np.concatenate(all_memory_points, axis=-1)
+
+            # centering all the pointclouds
+            detected_mean = np.mean(all_detected_points, axis=-1)
+            memory_mean = np.mean(all_memory_points, axis=-1)
+            all_detected_pcd = o3d.geometry.PointCloud()
+            all_detected_pcd.points = o3d.utility.Vector3dVector(all_detected_points.T - detected_mean)
+            all_memory_pcd = o3d.geometry.PointCloud()
+            all_memory_pcd.points = o3d.utility.Vector3dVector(all_memory_points.T - memory_mean)
+
+            # remove outliers from detected pcds
+            all_detected_pcd_filtered, _ = all_detected_pcd.remove_radius_outlier(nb_points=outlier_removal_config["radius_nb_points"],
+                                                                                  radius=outlier_removal_config["radius"])
 
-            # get min cost
-            if cost < min_cost:
-                min_cost = cost
-                best_assignment = assn
+            transform, fitness = register_point_clouds(all_detected_pcd_filtered, all_memory_pcd,
+                                                       voxel_size = fpfh_voxel_size, global_dist_factor = fpfh_global_dist_factor,
+                                                       local_dist_factor = fpfh_local_dist_factor)
 
-        print("Phrases: ", detected_phrases)
-        print(cosine_similarities)
-        print("Assignment: ", best_assignment)
-
-        # use ALL object pointclouds together
-        all_detected_points = []
-        all_memory_points = []
-        for i,j in enumerate(best_assignment):
-            all_detected_points.append(detected_pointclouds[i])
-            all_memory_points.append(self.memory[j].pcd)
-        all_detected_points = np.concatenate(all_detected_points, axis=-1)
-        all_memory_points = np.concatenate(all_memory_points, axis=-1)
-
-        # centering all the pointclouds
-        detected_mean = np.mean(all_detected_points, axis=-1)
-        memory_mean = np.mean(all_memory_points, axis=-1)
-        all_detected_pcd = o3d.geometry.PointCloud()
-        all_detected_pcd.points = o3d.utility.Vector3dVector(all_detected_points.T - detected_mean)
-        all_memory_pcd = o3d.geometry.PointCloud()
-        all_memory_pcd.points = o3d.utility.Vector3dVector(all_memory_points.T - memory_mean)
-
-        # remove outliers from detected pcds
-        all_detected_pcd_filtered, _ = all_detected_pcd.remove_radius_outlier(nb_points=outlier_removal_config["radius_nb_points"],
-                                                                              radius=outlier_removal_config["radius"])
+            assn_data[assn_num] = [assn, transform, fitness]
 
-        transform = register_point_clouds(all_detected_pcd_filtered, all_memory_pcd,
-                                          voxel_size = fpfh_voxel_size, global_dist_factor = fpfh_global_dist_factor,
-                                          local_dist_factor = fpfh_local_dist_factor)
+        best_assn = max(assn_data, key=lambda x: x[-1])
+
+        assn = best_assn[0]
+        transform = best_assn[1]
 
         R = copy.copy(transform[:3,:3])
         t = copy.copy(transform[:3, 3])
@@ -1152,7 +1148,9 @@ def localise(self, image_path, depth_image_path, testname="", save_point_clouds=
 
         localised_pose = np.concatenate((tAvg, qAvg))
 
-        return localised_pose
+        # moved objects will have indices that are not present in the first row of assn
+
+        return localised_pose, assn
 
 @dataclass
 class LocalArgs:
@@ -1188,8 +1186,8 @@ class LocalArgs:
 with open(poses_json_path, 'r') as f:
     poses = json.load(f)
 
-for target in range(1,9): # all of them
-# for target in [6,7,8]: # sanity check
+# for target in range(1,9): # all of them
+for target in [6,7,8]: # sanity check
     target_num = target
     target_pose = None
     for i, view in enumerate(poses["views"]):
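
The net effect in localise(): semantics propose, geometry disposes. The top K assignments by JCBB score each get a full FPFH + ICP registration, and the candidate with the highest fitness supplies both the returned pose and the returned assignment. A condensed sketch of that selection rule, with register_fn a hypothetical stand-in for the per-assignment point-cloud build and register_point_clouds call above:

import numpy as np

def pick_best_assignment(assns, register_fn, topK=5):
    # assns: [[assignment, score], ...] already sorted by JCBB score (desc).
    # register_fn(assignment) -> (transform, fitness); stands in for the
    # concatenate / center / filter / register pipeline in localise().
    assn_data = []
    for assn, _score in assns[:topK]:
        transform, fitness = register_fn(assn)
        assn_data.append([assn, transform, fitness])
    # highest ICP fitness wins, as in max(assn_data, key=lambda x: x[-1])
    return max(assn_data, key=lambda x: x[-1])

# Dummy usage: identity transforms, fitness favoring the first candidate.
candidates = [[[0, 2, 1], -0.10], [[1, 2, 0], -0.35]]
best = pick_best_assignment(candidates,
                            register_fn=lambda a: (np.eye(4), 1.0 / (1 + a[0])),
                            topK=5)
print(best)  # [assignment, transform, fitness]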
