@@ -1039,7 +1039,7 @@ def consolidate_memory(self, bounding_box_threshold=0.3, occlusion_overlap_thre
     def localise(self, image_path, depth_image_path, testname="", save_point_clouds=False,
                  outlier_removal_config=None,
                  fpfh_global_dist_factor=1.5, fpfh_local_dist_factor=0.4,
-                 fpfh_voxel_size=0.05):
+                 fpfh_voxel_size=0.05, topK=5):
         """
         Given an image and a corresponding depth image in an unknown frame, consult the stored memory
         and output a pose in the world frame of the point clouds stored in memory.
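The new topK argument bounds how many JCBB hypotheses get scored by registration, instead of committing to a single winner up front. A hypothetical call site, assuming an instance named `mem`; the paths and config values below are assumptions, only the keyword arguments come from this patch:

    # Hypothetical usage sketch; `mem`, the file paths, and the config values are assumptions.
    pose, assn = mem.localise(
        image_path="obs/rgb_006.png",
        depth_image_path="obs/depth_006.npy",
        outlier_removal_config={"radius_nb_points": 12, "radius": 0.05},
        topK=5,  # score only the 5 best JCBB assignments via ICP fitness
    )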
@@ -1097,52 +1097,48 @@ def localise(self, image_path, depth_image_path, testname="", save_point_clouds=
         j = JCBB(cosine_similarities, R_matrices)
         assns = j.get_assignments()

-        # calculate paths for all assingments, pick the best one
-        best_assignment = assns[0]
-        min_cost = 1e11
+        # only consider the top K assignments
+        assns_to_consider = [assn[0] for assn in assns[:topK]]

-        for assn in assns:
-            cost = 0
+        print("Phrases: ", detected_phrases)
+        print(cosine_similarities)
+        print("Assignments being considered: ", assns_to_consider)
+
+        assn_data = [[assn, None, None] for assn in assns_to_consider]

-            ### COST FUNCTION ###
+        # go through all top K assignments, record ICP costs
+        for assn_num, assn in enumerate(assns_to_consider):
+            # use ALL object pointclouds together
+            all_detected_points = []
+            all_memory_points = []
             for i, j in enumerate(assn):
-                cost += (1 - cosine_similarities[i, j])
-            cost = np.log(cost) * 1. / len(assn)  # normalized product of cosine DIFFERENCES
-            # NOTE: diving by length of assignments doesn't matter as all are same rn
+                all_detected_points.append(detected_pointclouds[i])
+                all_memory_points.append(self.memory[j].pcd)
+            all_detected_points = np.concatenate(all_detected_points, axis=-1)
+            all_memory_points = np.concatenate(all_memory_points, axis=-1)
+
+            # centering all the pointclouds
+            detected_mean = np.mean(all_detected_points, axis=-1)
+            memory_mean = np.mean(all_memory_points, axis=-1)
+            all_detected_pcd = o3d.geometry.PointCloud()
+            all_detected_pcd.points = o3d.utility.Vector3dVector(all_detected_points.T - detected_mean)
+            all_memory_pcd = o3d.geometry.PointCloud()
+            all_memory_pcd.points = o3d.utility.Vector3dVector(all_memory_points.T - memory_mean)
+
+            # remove outliers from detected pcds
+            all_detected_pcd_filtered, _ = all_detected_pcd.remove_radius_outlier(nb_points=outlier_removal_config["radius_nb_points"],
+                                                                                  radius=outlier_removal_config["radius"])

-            # get min cost
-            if cost < min_cost:
-                min_cost = cost
-                best_assignment = assn
+            transform, fitness = register_point_clouds(all_detected_pcd_filtered, all_memory_pcd,
+                                                       voxel_size=fpfh_voxel_size, global_dist_factor=fpfh_global_dist_factor,
+                                                       local_dist_factor=fpfh_local_dist_factor)

-        print("Phrases: ", detected_phrases)
-        print(cosine_similarities)
-        print("Assignment: ", best_assignment)
-
-        # use ALL object pointclouds together
-        all_detected_points = []
-        all_memory_points = []
-        for i, j in enumerate(best_assignment):
-            all_detected_points.append(detected_pointclouds[i])
-            all_memory_points.append(self.memory[j].pcd)
-        all_detected_points = np.concatenate(all_detected_points, axis=-1)
-        all_memory_points = np.concatenate(all_memory_points, axis=-1)
-
-        # centering all the pointclouds
-        detected_mean = np.mean(all_detected_points, axis=-1)
-        memory_mean = np.mean(all_memory_points, axis=-1)
-        all_detected_pcd = o3d.geometry.PointCloud()
-        all_detected_pcd.points = o3d.utility.Vector3dVector(all_detected_points.T - detected_mean)
-        all_memory_pcd = o3d.geometry.PointCloud()
-        all_memory_pcd.points = o3d.utility.Vector3dVector(all_memory_points.T - memory_mean)
-
-        # remove outliers from detected pcds
-        all_detected_pcd_filtered, _ = all_detected_pcd.remove_radius_outlier(nb_points=outlier_removal_config["radius_nb_points"],
-                                                                              radius=outlier_removal_config["radius"])
+            assn_data[assn_num] = [assn, transform, fitness]

-        transform = register_point_clouds(all_detected_pcd_filtered, all_memory_pcd,
-                                          voxel_size=fpfh_voxel_size, global_dist_factor=fpfh_global_dist_factor,
-                                          local_dist_factor=fpfh_local_dist_factor)
+        best_assn = max(assn_data, key=lambda x: x[-1])
+
+        assn = best_assn[0]
+        transform = best_assn[1]

         R = copy.copy(transform[:3, :3])
         t = copy.copy(transform[:3, 3])
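The structural change in this hunk: instead of minimizing a log cosine-difference cost over all assignments, each of the top-K assignments is registered and the one with the highest fitness wins. A standalone sketch of that pattern, substituting plain Open3D point-to-point ICP for this repo's register_point_clouds helper; the function and variable names below are assumptions, and ICP fitness here is Open3D's fraction of inlier correspondences:

    import numpy as np
    import open3d as o3d

    def best_assignment_by_fitness(detected_pcds, memory_pcds, candidates, max_corr_dist=0.05):
        """Score each candidate assignment by ICP fitness; return the best (assn, transform, fitness)."""
        scored = []
        for assn in candidates:  # assn[i] = memory index matched to detection i
            # Stack every matched object's points into one source / target cloud.
            src_pts = np.concatenate([np.asarray(detected_pcds[i].points) for i in range(len(assn))])
            dst_pts = np.concatenate([np.asarray(memory_pcds[j].points) for j in assn])
            src = o3d.geometry.PointCloud(o3d.utility.Vector3dVector(src_pts))
            dst = o3d.geometry.PointCloud(o3d.utility.Vector3dVector(dst_pts))
            # Centre both clouds before registering, as the patch does.
            src.translate(-src.get_center())
            dst.translate(-dst.get_center())
            result = o3d.pipelines.registration.registration_icp(
                src, dst, max_corr_dist, np.eye(4),
                o3d.pipelines.registration.TransformationEstimationPointToPoint())
            scored.append((assn, result.transformation, result.fitness))
        return max(scored, key=lambda x: x[-1])  # highest fitness wins, mirroring best_assn above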
@@ -1152,7 +1148,9 @@ def localise(self, image_path, depth_image_path, testname="", save_point_clouds=
         localised_pose = np.concatenate((tAvg, qAvg))

-        return localised_pose
+        # moved objects will have indices that are not present in the first row of assn
+
+        return localised_pose, assn


 @dataclass
 class LocalArgs:
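Returning assn alongside the pose is what enables the moved-object comment above: any memory object that no detection was matched to is a candidate for having moved. A minimal sketch of that check, assuming assn[i] is the memory index matched to detection i (consistent with the for i, j in enumerate(assn) loop) and that mem.memory is indexable; these names are assumptions:

    pose, assn = mem.localise(image_path, depth_image_path, topK=5)
    matched = set(assn)  # memory indices that found a detection
    moved_candidates = [k for k in range(len(mem.memory)) if k not in matched]
    print("possibly moved objects:", moved_candidates)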
@@ -1188,8 +1186,8 @@ class LocalArgs:
     with open(poses_json_path, 'r') as f:
         poses = json.load(f)

-    for target in range(1, 9): # all of them
-    # for target in [6,7,8]: # sanity check
+    # for target in range(1, 9): # all of them
+    for target in [6, 7, 8]: # sanity check
         target_num = target
         target_pose = None
         for i, view in enumerate(poses["views"]):