From b1deeebed4ea53b0449b86beba505464bfd20af7 Mon Sep 17 00:00:00 2001
From: zuern
Date: Tue, 9 May 2023 10:46:14 +0200
Subject: [PATCH] Prune weak successor branches in driver; parameterize
 evaluation split; batch-evaluate successor models

---
 cfg-rlgpu.yaml         |   2 +-
 driving/driver.py      | 548 ++++++++++++++++-------------------
 driving/utils.py       |  24 +-
 evaluate.py            |  41 ++-
 inference_regressor.py | 193 ++++++++++-----
 training-rlgpu.sh      |  86 ++++---
 6 files changed, 426 insertions(+), 468 deletions(-)

diff --git a/cfg-rlgpu.yaml b/cfg-rlgpu.yaml
index eeb3794..82110dc 100644
--- a/cfg-rlgpu.yaml
+++ b/cfg-rlgpu.yaml
@@ -23,7 +23,7 @@ model:
   dataparallel: True
   device: cuda
   batch_size: 4
-  batch_size_reg: 128
+  batch_size_reg: 384
   lr: 1e-4
   beta_lo: 0.9
   beta_hi: 0.999
diff --git a/driving/driver.py b/driving/driver.py
index 9cf44c5..666e0c7 100644
--- a/driving/driver.py
+++ b/driving/driver.py
@@ -28,7 +28,7 @@
 edge_start_idx = 10  # start index for selecting edge as future pose
 edge_end_idx = 50  # end index for selecting edge as future pose
 write_every = 10  # write to disk every n steps
-
+waitkey_ms = 2
 
 
 # CVPR graph aggregation
@@ -38,7 +38,8 @@
 
 init_poses = {
     "austin_83_34021_46605": np.array([1163, 2982, -2.69]),
-    "pittsburgh_36_27706_11407": np.array([1789, 2280, np.pi])
+    "pittsburgh_36_27706_11407": np.array([1789, 2280, 0.4 * np.pi]),
+    'pittsburgh_19_12706_31407': np.array([1789, 2280, 0.4 * np.pi]),
 }
@@ -355,6 +356,11 @@ def make_step(self):
         cv2.imshow("pred_succ", pred_succ)
 
         skeleton = skeletonize_prediction(pred_succ, threshold=skeleton_threshold)
+
+        self.skeleton = skeleton
+
+        self.pred_succ = pred_succ
+        self.pred_drivable = pred_drivable
         self.graph_skeleton = skeleton_to_graph(skeleton)
 
         for edge in self.graph_skeleton.edges():
@@ -370,9 +376,9 @@ def make_step(self):
         pred_angles_succ_color = self.ac.angle_to_color(pred_angles, mask=pred_succ > skeleton_threshold)
         pred_angles_color = self.ac.angle_to_color(pred_angles, mask=pred_drivable > 0.3)
 
-        # pred_angles_color
-        # cv2.imshow("pred_angles_color", pred_angles_color)
-        # cv2.imshow("rgb", rgb)
+        cv2.imshow("skeleton", skeleton)
+        cv2.imshow("pred_angles_color", pred_angles_color)
+        cv2.imshow("rgb", rgb)
 
         self.add_pred_to_canvas(skeleton)
@@ -391,27 +397,27 @@ def make_step(self):
         node_positions = np.array([nodes[i]['o'] for i in nodes])
         [cv2.circle(pred_succ_viz, (int(p[0]), int(p[1])), 4, (0, 255, 0), -1) for p in node_positions]
 
-        # cv2.imshow("pred_succ_viz", pred_succ_viz)
-        # cv2.imshow("pred_drivable", pred_drivable)
-        # cv2.imshow("pred_angles_succ_color", pred_angles_succ_color)
+
+        skeleton_drivable_weight = np.sum(skeleton * pred_drivable)
+        skeleton_succ_weight = np.sum(skeleton * pred_succ / 255.)
         if self.debug:
-            fig, axarr = plt.subplots(1, 6, figsize=(20, 20), sharex=True, sharey=True)
+            fig, axarr = plt.subplots(1, 6, figsize=(20, 5), sharex=True, sharey=True)
             axarr[0].imshow(cv2.cvtColor(rgb, cv2.COLOR_BGR2RGB))
             axarr[0].title.set_text('rgb')
             axarr[1].imshow(pred_drivable)
-            axarr[1].title.set_text('pred_drivable')
+            axarr[1].title.set_text('pred_drivable - {:.0f}'.format(skeleton_drivable_weight))
             axarr[2].imshow(pred_succ)
-            axarr[2].title.set_text('pred_succ')
+            axarr[2].title.set_text('pred_succ - {:.0f}'.format(skeleton_succ_weight))
             axarr[3].imshow(pred_angles_color)
             axarr[3].title.set_text('pred_angles_color')
             axarr[4].imshow(pred_angles_succ_color)
             axarr[4].title.set_text('pred_angles_succ_color')
             axarr[5].imshow(skeleton)
             axarr[5].title.set_text('skeleton')
-            plt.show()
+            plt.savefig("/home/zuern/Desktop/autograph/tmp/debug/{}-{:04d}_matplotlib.png".format(self.tile_id, self.step))
 
-        cv2.imwrite("/home/zuern/Desktop/tmp/other/{}-{:04d}_pred_succ_viz.png".format(self.tile_id, self.step), pred_succ_viz)
+        # cv2.imwrite("/home/zuern/Desktop/autograph/tmp/debug/{}-{:04d}_pred_succ_viz.png".format(self.tile_id, self.step), pred_succ_viz)
 
         self.step += 1
@@ -487,6 +493,8 @@ def visualize_write_G_single(self, graphs, name="G"):
                 end = (int(end[0]), int(end[1]))
                 cv2.arrowedLine(G_agg_viz, start, end, color=colors[i], thickness=1, line_type=cv2.LINE_AA)
 
+            pos = (int(self.pose_history[i, 0]), int(self.pose_history[i, 1]) - 10)
+            cv2.putText(G_agg_viz, "{} - {:.0f}".format(i, graph.graph["succ_graph_weight"]), pos, cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 1, cv2.LINE_AA)
 
         cv2.imwrite("/home/zuern/Desktop/autograph/tmp/G_agg/{}-{:04d}_{}_viz.png".format(self.tile_id, self.step, name), G_agg_viz)
@@ -613,7 +621,7 @@ def crop_coordintates_to_global(self, pose, pos_local):
 
     def drive_freely(self):
 
-        # if self.step > 200:
+        # if self.step > 50:
         #     self.done = True
         #     return
@@ -626,315 +634,190 @@ def drive_freely(self):
             self.make_step()
             return
 
-        # OLD DRIVING CODE
-
-        # G_current_local = self.graph_skeleton.copy()
-        #
-        # G_current_local_pruned = self.graph_skeleton.copy()
-        #
-        #
-        # # shorten all edges to 50 pixels
-        # for edge in G_current_local_pruned.edges:
-        #     if len(list(G_current_local_pruned.successors(edge[1]))) > 0:
-        #         continue
-        #     pts = G_current_local_pruned.edges[edge]['pts']
-        #     pts = pts[0:10, :]
-        #     G_current_local_pruned.edges[edge]['pts'] = pts
-        #
-        #     # also adjust node position
-        #     G_current_local_pruned.nodes[edge[1]]['pos'] = pts[-1, :]
-        #
-        #
-        # G_current_global_pruned = nx.DiGraph()
-        #
-        # # add nodes and edges from self.graph_skeleton and transform to global coordinates (for aggregation)
-        # for node in G_current_local_pruned.nodes:
-        #     # transform pos_start to global coordinates
-        #     pos_local = nx.get_node_attributes(G_current_local_pruned, "pts")[node][0].astype(np.float32)
-        #     pos_global = self.crop_coordintates_to_global(self.pose, pos_local)
-        #
-        #     G_current_global_pruned.add_node(node,
-        #                                      pos=pos_global,
-        #                                      weight=1.0,
-        #                                      score=1.0,)
-        #
-        # for edge in G_current_local_pruned.edges:
-        #     edge_points = G_current_local_pruned.edges[edge]["pts"]
-        #     edge_points = self.crop_coordintates_to_global(self.pose, edge_points)
-        #     G_current_global_pruned.add_edge(edge[0], edge[1], pts=edge_points)
-        #
-        # self.graphs.append(G_current_global_pruned)
-        #
-        #
-        # successor_points = []
-        # for node in G_current_local.nodes:
-        #     if len(list(G_current_local.successors(node))) >= 1:
-        #         successor_points.append(node)
-        #
-        # succ_edges = []
-        # for successor_point in successor_points:
-        #     succ = list(G_current_local.successors(successor_point))
-        #     for successor in succ:
-        #         succ_edges.append(G_current_local.edges[successor_point, successor])
-        #
-        # if len(succ_edges) == 0:
-        #     print(" No successor edges found.")
-        #
-        # for edge in succ_edges:
-        #
-        #     num_points_in_edge = len(edge["pts"])
-        #     if num_points_in_edge < edge_end_idx+1:
-        #         continue
-        #
-        #     pos_start_local = np.array([edge["pts"][edge_start_idx][1],
-        #                                 edge["pts"][edge_start_idx][0]])
-        #     pos_end_local = np.array([edge["pts"][edge_end_idx][1],
-        #                               edge["pts"][edge_end_idx][0]])
-        #
-        #     edge_start_global = self.crop_coordintates_to_global(self.pose, pos_start_local)
-        #     edge_end_global = self.crop_coordintates_to_global(self.pose, pos_end_local)
-        #
-        #
-        #     edge_local = pos_end_local - pos_start_local
-        #     angle_local = np.arctan2(edge_local[1], -edge_local[0])
-        #     angle_global = self.pose[2] + angle_local
-        #
-        #     # step_sizes = [20, 40, 60]  # number of pixels to move forward along edge
-        #     step_sizes = [40]  # number of pixels to move forward along edge
-        #
-        #     for step_size in step_sizes:
-        #
-        #
-        #         # define future pose
-        #         future_pose_global = np.zeros(3)
-        #         diff = step_size * (edge_end_global - edge_start_global) / np.linalg.norm(edge_end_global - edge_start_global)
-        #         future_pose_global[0:2] = edge_start_global + diff
-        #         future_pose_global[2] = self.yaw_check(angle_global)
-        #
-        #
-        #         # put future pose in queue if not yet visited
-        #         was_visited = similarity_check(future_pose_global, self.pose_history, min_dist=20, min_angle=np.pi/4)
-        #         is_already_in_queue = similarity_check(future_pose_global, self.future_poses, min_dist=20, min_angle=np.pi/4)
-        #
-        #         if not was_visited and not is_already_in_queue:
-        #             self.future_poses.append(future_pose_global)
-        #             print(" put pose in queue: {:.0f}, {:.0f}, {:.1f} (step size: {})".format(future_pose_global[0],
-        #                                                                                       future_pose_global[1],
-        #                                                                                       future_pose_global[2],
-        #                                                                                       step_size))
-        #
-        #         # add edge to aggregated graph
-        #         pointlist_local = np.array(edge["pts"][edge_start_idx:edge_end_idx])
-        #
-        #         pointlist_global = self.crop_coordintates_to_global(self.pose, pointlist_local)
-        #
-        #         node_edge_start = (int(edge_start_global[0]), int(edge_start_global[1]))
-        #         node_edge_end = (int(edge_end_global[0]), int(edge_end_global[1]))
-        #
-        #         # add G_agg-edge from edge start to edge end
-        #         self.G_agg_naive.add_node(node_edge_start, pos=edge_start_global)
-        #         self.G_agg_naive.add_node(node_edge_end, pos=edge_end_global)
-        #         self.G_agg_naive.add_edge(node_edge_start, node_edge_end, pts=pointlist_global)
-        #
-        #     break
-
-
-        # NEW DRIVING CODE
-
-
-        G_current_local = self.graph_skeleton.copy()
-        G_current_global = nx.DiGraph()
-
-
-        # add nodes and edges from self.graph_skeleton and transform to global coordinates (for aggregation)
-        for node in G_current_local.nodes:
-            # transform pos_start to global coordinates
-            pos_local = nx.get_node_attributes(G_current_local, "pts")[node][0].astype(np.float32)
-            pos_global = self.crop_coordintates_to_global(self.pose, pos_local)
-
-            G_current_global.add_node(node,
-                                      pos=pos_global,
-                                      weight=1.0,
-                                      score=1.0,)
-
-        for edge in G_current_local.edges:
-            edge_points = G_current_local.edges[edge]["pts"]
-            edge_points = self.crop_coordintates_to_global(self.pose, edge_points)
-            G_current_global.add_edge(edge[0], edge[1], pts=edge_points)
-
-        # convert to smooth graph
-        G_current_global_dense = roundify_skeleton_graph(G_current_global)
-        self.graphs.append(G_current_global_dense)
-
-        successor_points = []
-        for node in G_current_global.nodes:
-            if len(list(G_current_global.successors(node))) >= 1:
-                successor_points.append(node)
-
-        succ_edges = []
-        for successor_point in successor_points:
-            succ = list(G_current_global.successors(successor_point))
-            for successor in succ:
-                succ_edges.append(G_current_global.edges[successor_point, successor])
-
-        if len(succ_edges) == 0:
-            print(" No successor edges found.")
-
-        # loop over all successor edges to add them to the aggregated graph
+        # keep a local copy of the skeleton graph (the code below still uses it)
+        G_current_local = self.graph_skeleton.copy()
+
+        # # calculate the edge weights of the current graph
+        # succ_graph_weight = 0
+        # for edge in G_current_local.edges:
+        #     edges_u = G_current_local.edges[edge]['pts'][:, 0].astype(np.uint8)
+        #     edges_v = G_current_local.edges[edge]['pts'][:, 1].astype(np.uint8)
+        #     # edge_len = np.linalg.norm(G_current_local.edges[edge]['pts'][0] - G_current_local.edges[edge]['pts'][-1])
+        #     # succ_graph_weight += np.sum(self.pred_succ[edges_u, edges_v])
+        #     succ_graph_weight += np.sum(self.pred_succ[edges_u, edges_v])
-
-        # loop over all successor edges to find future poses
-        for edge in succ_edges:
-
-            num_points_in_edge = len(edge["pts"])
-            if num_points_in_edge < edge_end_idx+1:
-                continue
-
-            pos_start = np.array([edge["pts"][edge_start_idx][0],
-                                  edge["pts"][edge_start_idx][1]])
-            pos_end = np.array([edge["pts"][edge_end_idx][0],
-                                edge["pts"][edge_end_idx][1]])
-
-            edge_delta = pos_end - pos_start
-            angle_global = np.arctan2(edge_delta[0], -edge_delta[1])
-
-            # step_sizes = [20, 40, 60]  # number of pixels to move forward along edge
-            step_sizes = [40]
-
-            for step_size in step_sizes:
-
-                # define future pose
-                future_pose_global = np.zeros(3)
-                diff = step_size * (pos_end - pos_start) / np.linalg.norm(pos_end - pos_start)
-                future_pose_global[0:2] = pos_start + diff
-                future_pose_global[2] = self.yaw_check(angle_global)
-
-                # put future pose in queue if not yet visited
-                was_visited = similarity_check(future_pose_global,
-                                               self.pose_history,
-                                               min_dist=20,
-                                               min_angle=np.pi/4)
-                is_already_in_queue = similarity_check(future_pose_global,
-                                                       self.future_poses,
-                                                       min_dist=20,
-                                                       min_angle=np.pi/4)
-
-                if not was_visited and not is_already_in_queue:
-
-                    self.future_poses.append(future_pose_global)
-                    print(" put pose in queue: {:.0f}, {:.0f}, {:.1f} (step size: {})".format(future_pose_global[0],
-                                                                                              future_pose_global[1],
-                                                                                              future_pose_global[2],
-                                                                                              step_size))
-
-            # add edge to aggregated graph
-            pointlist = np.array(edge["pts"][edge_start_idx:edge_end_idx])
-
-            node_edge_start = (int(pos_start[0]), int(pos_start[1]))
-            node_edge_end = (int(pos_end[0]), int(pos_end[1]))
-
-            # add G_agg-edge from edge start to edge end
-            self.G_agg_naive.add_node(node_edge_start, pos=pos_start)
-            self.G_agg_naive.add_node(node_edge_end, pos=pos_end)
-            self.G_agg_naive.add_edge(node_edge_start, node_edge_end, pts=pointlist)
-
-            # add G_agg-edge from current pose to edge start
-            if np.linalg.norm(pos_start - self.pose[0:2]) < 50:
-                node_current_pose = (int(self.pose[0]), int(self.pose[1]))
-                self.G_agg_naive.add_node(node_current_pose, pos=self.pose[0:2])
-                self.G_agg_naive.add_edge(node_current_pose, node_edge_start)
-
-            # add G_agg-edge from edge end to future pose start
-            closest_distance = 100000
-            closest_edge = None
-            for inner_edge in succ_edges:
-                distance = np.linalg.norm(edge["pts"][0] - inner_edge["pts"][-1])
-                if distance < 1e-3:  # same edge
-                    continue
-                if distance < closest_distance and distance < 100:
-                    closest_distance = distance
-                    closest_edge = inner_edge
-
-            if closest_edge is not None:
-                if len(closest_edge["pts"]) > edge_end_idx:
print(" adding edge from edge end to future pose start") - pos_start = closest_edge["pts"][edge_end_idx] - node_start = (int(pos_start[0]), int(pos_start[1])) - self.G_agg_naive.add_node(node_start, pos=pos_start) - self.G_agg_naive.add_edge(node_start, node_edge_start) - break - - - - - - - - - - - - - - - - - - - - - - - - - if self.step % write_every == 0: - self.render_poses_in_aerial() - self.visualize_write_G_agg(self.G_agg_naive, "G_agg_naive") - self.visualize_write_G_single(self.graphs, "G_single") - - # G_agg_cvpr = driver.aggregate_graphs(self.graphs) - - # fig, axarr = plt.subplots(1, 3, figsize=(15, 5), sharex=True, sharey=True) - # [ax.set_aspect('equal') for ax in axarr] - # [ax.invert_yaxis() for ax in axarr] - # axarr[0].set_title("g single") - # axarr[1].set_title("G_agg_naive") - # axarr[2].set_title("G_agg_cvpr") - # [visualize_graph(g, axarr[0], node_color="g", edge_color="g") for g in self.graphs] - # visualize_graph(self.G_agg_naive, axarr[1], node_color="b", edge_color="b") - # visualize_graph(G_agg_cvpr, axarr[2], node_color="r", edge_color="r") - # plt.show() - - - - print(" Pose queue size: {}".format(len(self.future_poses))) + succ_graph_weight = np.sum(self.skeleton * self.pred_drivable) + + # do branch_alive check + branch_alive = True + if succ_graph_weight < 50: + print(" Successor Graph too weak, aborting branch") + branch_alive = False + + + if branch_alive: + + G_current_global = nx.DiGraph() + + # add nodes and edges from self.graph_skeleton and transform to global coordinates (for aggregation) + for node in G_current_local.nodes: + # transform pos_start to global coordinates + pos_local = nx.get_node_attributes(G_current_local, "pts")[node][0].astype(np.float32) + pos_global = self.crop_coordintates_to_global(self.pose, pos_local) + + G_current_global.add_node(node, + pos=pos_global, + weight=1.0, + score=1.0,) + + for edge in G_current_local.edges: + edge_points = G_current_local.edges[edge]["pts"] + edge_points = self.crop_coordintates_to_global(self.pose, edge_points) + G_current_global.add_edge(edge[0], edge[1], pts=edge_points) + + # convert to smooth graph + G_current_global_dense = roundify_skeleton_graph(G_current_global) + G_current_global_dense.graph["succ_graph_weight"] = succ_graph_weight + self.graphs.append(G_current_global_dense) + + successor_points = [] + for node in G_current_global.nodes: + if len(list(G_current_global.successors(node))) >= 1: + successor_points.append(node) + + succ_edges = [] + for successor_point in successor_points: + succ = list(G_current_global.successors(successor_point)) + for successor in succ: + succ_edges.append(G_current_global.edges[successor_point, successor]) + + if len(succ_edges) == 0: + print(" No successor edges found.") + + + # loop over all successor edges to find future poses + for edge in succ_edges: + + num_points_in_edge = len(edge["pts"]) + if num_points_in_edge < edge_end_idx+1: + continue + + pos_start = np.array([edge["pts"][edge_start_idx][0], + edge["pts"][edge_start_idx][1]]) + pos_end = np.array([edge["pts"][edge_end_idx][0], + edge["pts"][edge_end_idx][1]]) + + edge_delta = pos_end - pos_start + angle_global = np.arctan2(edge_delta[0], -edge_delta[1]) + + # step_sizes = [20, 40, 60] # number of pixels to move forward along edge + step_sizes = [40] + + for step_size in step_sizes: + + # define future pose + future_pose_global = np.zeros(3) + diff = step_size * (pos_end - pos_start) / np.linalg.norm(pos_end - pos_start) + future_pose_global[0:2] = pos_start + diff + 
+                    future_pose_global[2] = self.yaw_check(angle_global)
+
+                    # put future pose in queue if not yet visited
+                    was_visited = similarity_check(future_pose_global,
+                                                   self.pose_history,
+                                                   min_dist=20,
+                                                   min_angle=np.pi/4)
+                    is_already_in_queue = similarity_check(future_pose_global,
+                                                           self.future_poses,
+                                                           min_dist=20,
+                                                           min_angle=np.pi/4)
+
+                    if not was_visited and not is_already_in_queue:
+
+                        self.future_poses.append(future_pose_global)
+                        print(" put pose in queue: {:.0f}, {:.0f}, {:.1f} (step size: {})".format(future_pose_global[0],
+                                                                                                  future_pose_global[1],
+                                                                                                  future_pose_global[2],
+                                                                                                  step_size))
+
+                # add edge to aggregated graph
+                pointlist = np.array(edge["pts"][edge_start_idx:edge_end_idx])
+
+                node_edge_start = (int(pos_start[0]), int(pos_start[1]))
+                node_edge_end = (int(pos_end[0]), int(pos_end[1]))
+
+                # add G_agg-edge from edge start to edge end
+                self.G_agg_naive.add_node(node_edge_start, pos=pos_start)
+                self.G_agg_naive.add_node(node_edge_end, pos=pos_end)
+                self.G_agg_naive.add_edge(node_edge_start, node_edge_end, pts=pointlist)
+
+                # add G_agg-edge from current pose to edge start
+                if np.linalg.norm(pos_start - self.pose[0:2]) < 50:
+                    node_current_pose = (int(self.pose[0]), int(self.pose[1]))
+                    self.G_agg_naive.add_node(node_current_pose, pos=self.pose[0:2])
+                    self.G_agg_naive.add_edge(node_current_pose, node_edge_start)
+
+                # add G_agg-edge from edge end to future pose start
+                closest_distance = 100000
+                closest_edge = None
+                for inner_edge in succ_edges:
+                    distance = np.linalg.norm(edge["pts"][0] - inner_edge["pts"][-1])
+                    if distance < 1e-3:  # same edge
+                        continue
+                    if distance < closest_distance and distance < 100:
+                        closest_distance = distance
+                        closest_edge = inner_edge
+
+                if closest_edge is not None:
+                    if len(closest_edge["pts"]) > edge_end_idx:
+                        print(" adding edge from edge end to future pose start")
+                        pos_start = closest_edge["pts"][edge_end_idx]
+                        node_start = (int(pos_start[0]), int(pos_start[1]))
+                        self.G_agg_naive.add_node(node_start, pos=pos_start)
+                        self.G_agg_naive.add_edge(node_start, node_edge_start)
+                break
+
+            if self.step % write_every == 0:
+                self.render_poses_in_aerial()
+                self.visualize_write_G_agg(self.G_agg_naive, "G_agg_naive")
+                self.visualize_write_G_single(self.graphs, "G_single")
+
+            # G_agg_cvpr = driver.aggregate_graphs(self.graphs)
+            #
+            # fig, axarr = plt.subplots(1, 3, figsize=(15, 5), sharex=True, sharey=True)
+            # [ax.set_aspect('equal') for ax in axarr]
+            # [ax.invert_yaxis() for ax in axarr]
+            # axarr[0].set_title("g single")
+            # axarr[1].set_title("G_agg_naive")
+            # axarr[2].set_title("G_agg_cvpr")
+            # [visualize_graph(g, axarr[0], node_color="g", edge_color="g") for g in self.graphs]
+            # visualize_graph(self.G_agg_naive, axarr[1], node_color="b", edge_color="b")
+            # visualize_graph(G_agg_cvpr, axarr[2], node_color="r", edge_color="r")
+            # plt.show()
+
+        print(" Pose queue size: {}".format(len(self.future_poses)))
 
         if len(self.future_poses) == 0:
             print("future_poses empty. Exiting.")
             self.done = True
             return
 
-        else:
-            # reorder queue based on distance to current pose
-            self.future_poses.sort(key=lambda x: np.linalg.norm(x[0:2] - self.pose[0:2]))
+        # reorder queue based on distance to current pose
+        self.future_poses.sort(key=lambda x: np.linalg.norm(x[0:2] - self.pose[0:2]))
+
+        self.pose = self.future_poses.pop(0)
+        while out_of_bounds_check(self.pose, self.aerial_image.shape, oob_margin=500):
+            print(" pose out of bounds. removing from queue")
+            if len(self.future_poses) == 0:
+                print("future_poses empty. Exiting.")
Exiting.") + self.done = True + break self.pose = self.future_poses.pop(0) - while out_of_bounds_check(self.pose, self.aerial_image.shape, oob_margin=500): - print(" pose out of bounds. removing from queue") - if len(self.future_poses) == 0: - print("future_poses empty. Exiting.") - self.done = True - break - self.pose = self.future_poses.pop(0) - print(" get pose from queue: {:.0f}, {:.0f}, {:.1f}".format(self.pose[0], self.pose[1], self.pose[2])) + print(" get pose from queue: {:.0f}, {:.0f}, {:.1f}".format(self.pose[0], self.pose[1], self.pose[2])) self.pose[2] = self.yaw_check(self.pose[2]) self.make_step() - cv2.waitKey(1) + cv2.waitKey(waitkey_ms) def cleanup(self): cv2.destroyAllWindows() @@ -953,11 +836,11 @@ def cleanup(self): # tile_id = "austin_83_34021_46605" - tile_id = "pittsburgh_36_27706_11407" + #tile_id = "pittsburgh_36_27706_11407" + tile_id = 'pittsburgh_19_12706_31407' - - driver = AerialDriver(debug=False, input_layers=input_layers, tile_id=tile_id) + driver = AerialDriver(debug=True, input_layers=input_layers, tile_id=tile_id) # driver.load_model(model_path="/data/autograph/checkpoints/clean-hill-97/e-014.pth", # (austin only) # type="full") @@ -967,12 +850,11 @@ def cleanup(self): driver.load_model(model_path="/data/autograph/checkpoints/civilized-bothan-187/e-150.pth", # (all-3004) type="full") - driver.load_model(model_path="/data/autograph/checkpoints/jumping-spaceship-188/e-030.pth", # (all-3004) + driver.load_model(model_path="/data/autograph/checkpoints/jumping-spaceship-188/e-040.pth", # (all-3004) type="successor", input_layers=input_layers, ) - driver.load_satellite(impath=glob.glob("/data/lanegraph/urbanlanegraph-dataset-dev/*/tiles/*/{}.png".format(tile_id))[0]) while True: @@ -982,28 +864,26 @@ def cleanup(self): break - - # # load files from disk - # with open("/home/zuern/Desktop/autograph/tmp/G_agg/graphs_all.pickle", "rb") as f: - # graphs = pickle.load(f) - # with open("/home/zuern/Desktop/autograph/tmp/G_agg/G_agg_naive_all.pickle", "rb") as f: - # G_agg_naive = pickle.load(f) - # - # G_agg_cvpr = driver.aggregate_graphs(graphs) - # driver.visualize_write_G_agg(G_agg_cvpr, "G_agg_cvpr") - # driver.visualize_write_G_agg(G_agg_naive, "G_agg_naive") - # - # - # fig, axarr = plt.subplots(1, 3, figsize=(15, 5), sharex=True, sharey=True) - # img = cv2.cvtColor(driver.aerial_image, cv2.COLOR_BGR2RGB) - # [ax.imshow(img) for ax in axarr] - # axarr[0].set_title("g single") - # axarr[1].set_title("G_agg_naive") - # axarr[2].set_title("G_agg_cvpr") - # [visualize_graph(g, axarr[0], node_color=np.random.rand(3), edge_color=np.random.rand(3)) for g in graphs] - # visualize_graph(driver.G_agg_naive, axarr[1], node_color="b", edge_color="b") - # visualize_graph(G_agg_cvpr, axarr[2], node_color="r", edge_color="r") - # plt.show() + # load files from disk + with open("/home/zuern/Desktop/autograph/tmp/G_agg/{}-graphs_all.pickle".format(driver.tile_id), "rb") as f: + graphs = pickle.load(f) + with open("/home/zuern/Desktop/autograph/tmp/G_agg/{}-G_agg_naive_all.pickle".format(driver.tile_id), "rb") as f: + G_agg_naive = pickle.load(f) + + G_agg_cvpr = driver.aggregate_graphs(graphs) + driver.visualize_write_G_agg(G_agg_cvpr, "G_agg_cvpr") + driver.visualize_write_G_agg(G_agg_naive, "G_agg_naive") + + fig, axarr = plt.subplots(1, 3, figsize=(15, 5), sharex=True, sharey=True) + img = cv2.cvtColor(driver.aerial_image, cv2.COLOR_BGR2RGB) + [ax.imshow(img) for ax in axarr] + axarr[0].set_title("g single") + axarr[1].set_title("G_agg_naive") + 
axarr[2].set_title("G_agg_cvpr") + [visualize_graph(g, axarr[0], node_color=np.random.rand(3), edge_color=np.random.rand(3)) for g in graphs] + visualize_graph(driver.G_agg_naive, axarr[1], node_color="b", edge_color="b") + visualize_graph(G_agg_cvpr, axarr[2], node_color="r", edge_color="r") + plt.show() exit() diff --git a/driving/utils.py b/driving/utils.py index 012385d..5c2f947 100644 --- a/driving/utils.py +++ b/driving/utils.py @@ -39,6 +39,7 @@ def roundify_skeleton_graph(skeleton_graph: nx.DiGraph): # add new points and edges to the graph for i in range(0, len(pointlist) - 1): + if i == 0: point = (int(pointlist[i][0]), int(pointlist[i][1])) skeleton_graph_.add_node(point, pos=pointlist[i], weight=1.0, score=1.0) @@ -52,7 +53,6 @@ def roundify_skeleton_graph(skeleton_graph: nx.DiGraph): skeleton_graph_.add_node(point0, pos=pointlist[i], weight=1.0, score=1.0) point1 = (int(pointlist[i + 1][0]), int(pointlist[i + 1][1])) skeleton_graph_.add_node(point1, pos=pointlist[i + 1], weight=1.0, score=1.0) - skeleton_graph_.add_edge(point0, point1) skeleton_graph_.remove_edge(edge[0], edge[1]) @@ -69,7 +69,21 @@ def skeletonize_prediction(pred_succ, threshold=0.5): # first, convert to binary pred_succ_thrshld = (pred_succ > threshold).astype(np.uint8) - #cv2.imshow("pred_succ_thrshld", pred_succ_thrshld * 255) + # colorize with 0.1 bins + bins = np.arange(0, 1.1, 0.1) + bins_viz = np.digitize(pred_succ, bins) / len(bins) + + + # colorize + bins_viz = cv2.applyColorMap((bins_viz * 255).astype(np.uint8), cv2.COLORMAP_JET) + + # visualize pred_succ_thrshld + # cv2.imshow("bins_viz", bins_viz) + + + + + # cv2.imshow("pred_succ_thrshld", pred_succ_thrshld * 255) # then, skeletonize skeleton = skeletonize(pred_succ_thrshld) @@ -80,6 +94,8 @@ def skeletonize_prediction(pred_succ, threshold=0.5): skeleton[:, :N] = 0 skeleton[:, -N:] = 0 + skeleton[-20:, :] = 0 # TODO: does this work? 
diff --git a/driving/utils.py b/driving/utils.py
index 012385d..5c2f947 100644
--- a/driving/utils.py
+++ b/driving/utils.py
@@ -39,6 +39,7 @@ def roundify_skeleton_graph(skeleton_graph: nx.DiGraph):
 
         # add new points and edges to the graph
         for i in range(0, len(pointlist) - 1):
+
             if i == 0:
                 point = (int(pointlist[i][0]), int(pointlist[i][1]))
                 skeleton_graph_.add_node(point, pos=pointlist[i], weight=1.0, score=1.0)
@@ -52,7 +53,6 @@ def roundify_skeleton_graph(skeleton_graph: nx.DiGraph):
                 skeleton_graph_.add_node(point0, pos=pointlist[i], weight=1.0, score=1.0)
                 point1 = (int(pointlist[i + 1][0]), int(pointlist[i + 1][1]))
                 skeleton_graph_.add_node(point1, pos=pointlist[i + 1], weight=1.0, score=1.0)
-
                 skeleton_graph_.add_edge(point0, point1)
 
         skeleton_graph_.remove_edge(edge[0], edge[1])
@@ -69,7 +69,21 @@ def skeletonize_prediction(pred_succ, threshold=0.5):
 
     # first, convert to binary
     pred_succ_thrshld = (pred_succ > threshold).astype(np.uint8)
 
-    #cv2.imshow("pred_succ_thrshld", pred_succ_thrshld * 255)
+    # colorize with 0.1 bins
+    bins = np.arange(0, 1.1, 0.1)
+    bins_viz = np.digitize(pred_succ, bins) / len(bins)
+
+    # colorize
+    bins_viz = cv2.applyColorMap((bins_viz * 255).astype(np.uint8), cv2.COLORMAP_JET)
+
+    # visualize pred_succ_thrshld
+    # cv2.imshow("bins_viz", bins_viz)
+
+    # cv2.imshow("pred_succ_thrshld", pred_succ_thrshld * 255)
 
     # then, skeletonize
     skeleton = skeletonize(pred_succ_thrshld)
@@ -80,6 +94,8 @@ def skeletonize_prediction(pred_succ, threshold=0.5):
     skeleton[:, :N] = 0
     skeleton[:, -N:] = 0
 
+    skeleton[-20:, :] = 0  # TODO: does this work?
+
     return skeleton
@@ -126,6 +142,10 @@ def skeleton_to_graph(skeleton):
             graph[s][e]['pts'] = np.flip(graph[e][s]['pts'], axis=0)
             graph.remove_edge(e, s)
 
+    # remove selfloops
+    graph.remove_edges_from(nx.selfloop_edges(graph))
+
     return graph
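
Note on driving/utils.py: skeletonize_prediction() now builds a binned JET visualization of the raw successor heatmap before thresholding. A self-contained sketch of the same binning, assuming pred is a float map in [0, 1]; the wrapper function is ours:

    import cv2
    import numpy as np

    def colorize_in_bins(pred, n_bins=10):
        # Quantize into 0.1-wide bins (np.digitize returns indices
        # 1..n_bins+1 here), normalize, and map to a JET color image.
        bins = np.arange(0, 1.1, 0.1)
        binned = np.digitize(pred, bins) / len(bins)
        return cv2.applyColorMap((binned * 255).astype(np.uint8), cv2.COLORMAP_JET)

skeleton_to_graph() additionally drops self-loop edges via nx.selfloop_edges(), removing degenerate one-node cycles before the driver consumes the graph.
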
output["submission_result"] = output["result"][0] @@ -249,14 +241,12 @@ def evaluate(annotation_file, user_submission_file, phase_codename, **kwargs): elif phase_codename == "phase_planning": print("%%%%%%%%%%%%%%%%%%%%%\n%%%%%%\tEvaluating for Phase: phase_planning\n%%%%%%%%%%%%%%%%%%%%%") - out_dict = evaluate_planning(graphs_gt, graphs_pred) - - pprint.pprint(out_dict) + out_dict = evaluate_planning(graphs_gt, graphs_pred, split) # the average over all cities for the eval split is this dict entry: - metrics_planning = out_dict["eval"]["avg"] + metrics_planning = out_dict[split]["avg"] - output["result"] = [{"eval_split_planning": metrics_planning}] + output["result"] = [{"{}_split_planning".format(split): metrics_planning}] # To display the results in the result file output["submission_result"] = output["result"][0] @@ -279,7 +269,8 @@ def evaluate(annotation_file, user_submission_file, phase_codename, **kwargs): # # Task: Full LGP, Eval Split results_dict = evaluate(annotation_file="annotations_full_lgp_eval.pickle", user_submission_file="/home/zuern/Desktop/autograph/tmp/G_agg/0011_G_agg_cvpr.pickle", - phase_codename="phase_full_lgp") + phase_codename="phase_full_lgp", + split="eval") # # # Task: Planning, Eval Split # results_dict = evaluate(annotation_file="annotations_full_lgp_eval.pickle", diff --git a/inference_regressor.py b/inference_regressor.py index a40f902..266b21e 100644 --- a/inference_regressor.py +++ b/inference_regressor.py @@ -12,8 +12,11 @@ import os from evaluate import evaluate from tqdm import tqdm +import matplotlib.pyplot as plt import pprint -from driving.utils import aggregate, colorize, skeleton_to_graph, skeletonize_prediction, roundify_skeleton_graph +from driving.utils import skeleton_to_graph, skeletonize_prediction, roundify_skeleton_graph +from random import shuffle +import pandas as pd class FormatPrinter(pprint.PrettyPrinter): @@ -36,8 +39,8 @@ def visualize_graph(G, ax, aerial_image, node_color=np.array([255, 0, 142])/255. 
diff --git a/inference_regressor.py b/inference_regressor.py
index a40f902..266b21e 100644
--- a/inference_regressor.py
+++ b/inference_regressor.py
@@ -12,8 +12,11 @@
 import os
 from evaluate import evaluate
 from tqdm import tqdm
+import matplotlib.pyplot as plt
 import pprint
-from driving.utils import aggregate, colorize, skeleton_to_graph, skeletonize_prediction, roundify_skeleton_graph
+from driving.utils import skeleton_to_graph, skeletonize_prediction, roundify_skeleton_graph
+from random import shuffle
+import pandas as pd
 
 class FormatPrinter(pprint.PrettyPrinter):
@@ -36,8 +39,8 @@ def visualize_graph(G, ax, aerial_image, node_color=np.array([255, 0, 142])/255.
                      edge_color=node_color,
                      node_color=edge_color,
                      with_labels=False,
-                     node_size=5,
-                     arrowsize=15.0, )
+                     node_size=3,
+                     arrowsize=8.0, )
 
 def load_full_model(model_path):
@@ -94,37 +97,38 @@ def load_succ_model(model_path, full_model=False, input_layers="rgb+drivable+ang
     return model_succ
 
-def run_successor_lgp(picklefile):
-
-    split = "eval"
+def run_successor_lgp(full_model_pth, succ_model_pth, input_layers, picklefile, split):
 
     # Image folder
     test_images = sorted(glob("/data/lanegraph/urbanlanegraph-dataset-dev/*/successor-lgp/{}/*-rgb.png".format(split)))
     test_graphs = sorted(glob("/data/lanegraph/urbanlanegraph-dataset-dev/*/successor-lgp/{}/*.gpickle".format(split)))
+    # test_images = sorted(glob("//data/autograph/all-3004/lanegraph/pittsburgh/test/branching/*-rgb.png"))
+    # test_graphs = sorted(glob("/data/lanegraph/urbanlanegraph-dataset-dev/*/successor-lgp/{}/*.gpickle".format(split)))
 
-    # full model
-    full_model_pth = "/data/autograph/checkpoints/civilized-bothan-187/e-150.pth"   # full model tracklets
-    #full_model_pth = "/data/autograph/checkpoints/civilized-bothan-187/e-150.pth"  # full model lanegraph
-
-    # succ model
-    succ_model = "/data/autograph/checkpoints/cosmic-feather-189/e-010.pth"  # tracklets_joint rgb
-    input_layers = "rgb"
-
-    # succ_model = "/data/autograph/checkpoints/jumping-spaceship-188/e-030.pth"  # tracklets_joint rgb+drivable+angles
-    # input_layers = "rgb+drivable+angles"
+    # shuffle(test_images)
+    # # jointly shuffle them
+    joint = list(zip(test_images, test_graphs))
+    np.random.shuffle(joint)
+    test_images, test_graphs = zip(*joint)
 
     # Load model
     model_full = load_full_model(model_path=full_model_pth)
-    model_succ = load_succ_model(model_path=succ_model,
+    model_succ = load_succ_model(model_path=succ_model_pth,
                                  full_model=True,
                                  input_layers=input_layers)
 
     pred_dict = {}
 
-    for test_image, test_graph in tqdm(zip(test_images, test_graphs), total=len(test_images), desc="Testing samples"):
+    images = []
+    images_succ = []
+    graphs_pred = []
+    graphs_gt = []
+
+    for image_counter, (test_image, test_graph) in tqdm(enumerate(zip(test_images, test_graphs)),
+                                                        total=len(test_images),
+                                                        desc="Inference on samples"):
 
-        # print("Loading sample: {}".format(test_image))
         sample_id = os.path.basename(test_image).replace("-rgb.png", "")
@@ -155,8 +159,7 @@
         pred_angles = torch.nn.Tanh()(pred[0:1, 0:2, :, :])
         pred_drivable = torch.nn.Sigmoid()(pred[0:1, 2:3, :, :])
 
-
-        if input_layers == "rgb":  # rgb [3], pos_enc [3], pred_drivable [1], pred_angles [2]
+        if input_layers == "rgb":
             in_tensor = rgb_torch
         elif input_layers == "rgb+drivable":
             in_tensor = torch.cat([rgb_torch, pred_drivable], dim=1)
@@ -174,8 +177,9 @@
         pred_succ = torch.nn.Sigmoid()(pred_succ)
         pred_succ = pred_succ[0, 0].cpu().detach().numpy()
 
-        skeleton = skeletonize_prediction(pred_succ, threshold=0.05)
+        skeleton = skeletonize_prediction(pred_succ, threshold=0.15)
         succ_graph = skeleton_to_graph(skeleton)
+
         succ_graph = roundify_skeleton_graph(succ_graph)
 
@@ -185,56 +189,115 @@
         pred_dict[city_name][split][sample_id] = succ_graph
 
-        # # Visualize
-        # fig, ax = plt.subplots(1, 3, figsize=(15, 5), sharex=True, sharey=True)
-        # img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
-        # visualize_graph(gt_graph, ax[0], aerial_image=img, node_color='white', edge_color='white')
-        # visualize_graph(succ_graph, ax[1], aerial_image=img)
-        # visualize_graph(gt_graph, ax[2], aerial_image=img, node_color='white', edge_color='white')
-        # visualize_graph(succ_graph, ax[2], aerial_image=img)
-        # ax[1].imshow(pred_succ, vmin=0, alpha=0.5)
-        # plt.show()
+
+        images.append(img)
+        images_succ.append(np.digitize(pred_succ, np.arange(0, 1.1, 0.1)))
+        graphs_pred.append(succ_graph)
+        graphs_gt.append(gt_graph)
+
+        # Visualize
+        print(sample_id)
+        plot_every = 10
+        if image_counter % plot_every == 0 and image_counter > 0:
+            fig, ax = plt.subplots(plot_every, 4, sharex=True, sharey=True, figsize=(10, 30))
+            plt.tight_layout()
+            plt.subplots_adjust(wspace=0, hspace=0)
+            for i in range(plot_every):
+                ax[i, 0].axis("off")
+                ax[i, 1].axis("off")
+                ax[i, 2].axis("off")
+                ax[i, 3].axis("off")
+                ax[i, 0].set_title(sample_id)
+                img = cv2.cvtColor(images[image_counter-i], cv2.COLOR_BGR2RGB)
+                visualize_graph(graphs_gt[image_counter-i], ax[i, 0], aerial_image=img, node_color='white', edge_color='white')
+                visualize_graph(graphs_pred[image_counter-i], ax[i, 1], aerial_image=img)
+                visualize_graph(graphs_gt[image_counter-i], ax[i, 2], aerial_image=img, node_color='white', edge_color='white')
+                visualize_graph(graphs_pred[image_counter-i], ax[i, 2], aerial_image=img)
+                ax[i, 3].imshow(images_succ[image_counter-i], vmin=1.1, cmap="jet")
+            plt.savefig("/home/zuern/Desktop/autograph/eval_succ/viz/{:04d}.svg".format(image_counter))
+            # exit()
 
     pickle.dump(pred_dict, open(picklefile, "wb"))
 
 if __name__ == "__main__":
 
-    split = "eval"
+    split = "test"
 
-    predictions_file = 'succ_lgp_eval_autograph.pickle'
-    run_successor_lgp(picklefile=predictions_file)
-
-    results_dict = evaluate(annotation_file="/home/zuern/lanegnn-dev/urbanlanegraph_evaluator/annotations_successor_lgp_eval.pickle",
-                            user_submission_file=predictions_file,
-                            phase_codename="phase_successor_lgp")
-
-    print("austin")
-    for k,v in results_dict['submission_result']["austin"][split]["avg"].items():
-        print(" {}: {:.3f}".format(k, v))
-
-    print("detroit")
-    for k,v in results_dict['submission_result']["detroit"][split]["avg"].items():
-        print(" {}: {:.3f}".format(k, v))
-
-    print("miami")
-    for k,v in results_dict['submission_result']["miami"][split]["avg"].items():
-        print(" {}: {:.3f}".format(k, v))
-
-    print("paloalto")
-    for k,v in results_dict['submission_result']["paloalto"][split]["avg"].items():
-        print(" {}: {:.3f}".format(k, v))
-
-    print("pittsburgh")
-    for k,v in results_dict['submission_result']["pittsburgh"][split]["avg"].items():
-        print(" {}: {:.3f}".format(k, v))
-
-    print("washington")
-    for k,v in results_dict['submission_result']["washington"][split]["avg"].items():
-        print(" {}: {:.3f}".format(k, v))
 
+    # full model
+    full_model_pth = "/data/autograph/checkpoints/civilized-bothan-187/e-150.pth"   # full model tracklets
+    #full_model_pth = "/data/autograph/checkpoints/civilized-bothan-187/e-150.pth"  # full model lanegraph
+
+    # succ model dict
+    model_dicts = [
+        {"model_path": "/data/autograph/checkpoints/jumping-spaceship-188/e-040.pth",
+         "model_notes": "tracklets_joint|successor|rgb+drivable+angles",
+         "input_layers": "rgb+drivable+angles"},
+
+        {"model_path": "/data/autograph/checkpoints/tough-blaze-198/e-008.pth",
+         "model_notes": "tracklets_joint|successor|rgb",
+         "input_layers": "rgb"},
+
+        {"model_path": "/data/autograph/checkpoints/hardy-frog-198/e-012.pth",
+         "model_notes": "tracklets_joint|successor|rgb+drivable",
+         "input_layers": "rgb+drivable"},
+
+        {"model_path": "/data/autograph/checkpoints/splendid-breeze-198/e-010.pth",
+         "model_notes": "tracklets_joint|successor|rgb+drivable+angles",
+         "input_layers": "rgb+drivable+angles"},
+
+        {"model_path": "/data/autograph/checkpoints/dandy-cherry-199/e-032.pth",
"/data/autograph/checkpoints/dandy-cherry-199/e-032.pth", + "model_notes": "tracklets_raw|successor|rgb", + "input_layers": "rgb"}, + ] + + results_df = pd.DataFrame(columns=["model_name", "model_notes", "split", "iou", "apls", "geo_precision", + "geo_recall","topo_precision","topo_recall","sda@20","sda@50"]) + + for model_dict in model_dicts: + succ_model_pth = model_dict["model_path"] + model_notes = model_dict["model_notes"] + input_layers = model_dict["input_layers"] + + + model_name = succ_model_pth.split("/")[-2:] + model_name = "_".join(model_name) + model_identifier = model_name + "_" + model_notes + "_" + split + + predictions_file = '/home/zuern/Desktop/autograph/eval_succ/{}_predictions.pickle'.format(model_identifier) + run_successor_lgp(full_model_pth=full_model_pth, + succ_model_pth=succ_model_pth, + input_layers=input_layers, + picklefile=predictions_file, + split=split) + + results_dict = evaluate(annotation_file="/home/zuern/lanegnn-dev/urbanlanegraph_evaluator/annotations_successor_lgp_{}.pickle".format(split), + user_submission_file=predictions_file, + phase_codename="phase_successor_lgp", + split=split,) + + print("avg") + for k, v in results_dict['submission_result'][split]["avg"].items(): + print(" {}: {:.3f}".format(k, v)) + + # save dict + pickle.dump(results_dict, open("/home/zuern/Desktop/autograph/eval_succ/{}_results_dict.pickle".format(model_identifier), "wb")) + + # save results + results_df = results_df.append({"model_name": model_name, + "model_notes": model_notes, + "split": split, + "iou": results_dict['submission_result'][split]["avg"]["Graph IoU"], + "apls": results_dict['submission_result'][split]["avg"]["APLS"], + "geo_precision": results_dict['submission_result'][split]["avg"]["GEO Precision"], + "geo_recall": results_dict['submission_result'][split]["avg"]["GEO Recall"], + "topo_precision": results_dict['submission_result'][split]["avg"]["TOPO Precision"], + "topo_recall": results_dict['submission_result'][split]["avg"]["TOPO Recall"], + "sda@20": results_dict['submission_result'][split]["avg"]["SDA20"], + "sda@50": results_dict['submission_result'][split]["avg"]["SDA50"] + }, + ignore_index=True) + + results_df.to_csv("/home/zuern/Desktop/autograph/eval_succ/results_all.csv", index=False) - print("avg") - for k,v in results_dict['submission_result'][split]["avg"].items(): - print(" {}: {:.3f}".format(k, v)) diff --git a/training-rlgpu.sh b/training-rlgpu.sh index 64f8169..84a11eb 100644 --- a/training-rlgpu.sh +++ b/training-rlgpu.sh @@ -1,57 +1,61 @@ export WANDB_API_KEY=8eca0f0d4e3d49c3728c9aa0e00b316c2d80012f export DATASET_NAME=all-3004 -## TrackletNet gt-supervised -#CUDA_VISIBLE_DEVICES=0,1 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \ +# TrackletNet gt-supervised +CUDA_VISIBLE_DEVICES=0,1,2 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \ + --config cfg-rlgpu.yaml \ + --target full \ + --dataset_name $DATASET_NAME/lanegraph + +sleep 1 + +# TrackletNet tracklet-supervised +CUDA_VISIBLE_DEVICES=3,4,5 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \ + --config cfg-rlgpu.yaml \ + --target full \ + --dataset_name $DATASET_NAME/tracklets_joint + + + + +## SuccNet tracklets_raw supervised (no TrackletNet) +#CUDA_VISIBLE_DEVICES=0 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \ # --config cfg-rlgpu.yaml \ -# --target full \ -# --dataset_name $DATASET_NAME/lanegraph +# --target successor \ 
diff --git a/training-rlgpu.sh b/training-rlgpu.sh
index 64f8169..84a11eb 100644
--- a/training-rlgpu.sh
+++ b/training-rlgpu.sh
@@ -1,57 +1,61 @@
 export WANDB_API_KEY=8eca0f0d4e3d49c3728c9aa0e00b316c2d80012f
 export DATASET_NAME=all-3004
 
-## TrackletNet gt-supervised
-#CUDA_VISIBLE_DEVICES=0,1 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \
+# TrackletNet gt-supervised
+CUDA_VISIBLE_DEVICES=0,1,2 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \
+  --config cfg-rlgpu.yaml \
+  --target full \
+  --dataset_name $DATASET_NAME/lanegraph
+
+sleep 1
+
+# TrackletNet tracklet-supervised
+CUDA_VISIBLE_DEVICES=3,4,5 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \
+  --config cfg-rlgpu.yaml \
+  --target full \
+  --dataset_name $DATASET_NAME/tracklets_joint
+
+
+
+
+## SuccNet tracklets_raw supervised (no TrackletNet)
+#CUDA_VISIBLE_DEVICES=0 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \
 #  --config cfg-rlgpu.yaml \
-#  --target full \
-#  --dataset_name $DATASET_NAME/lanegraph
+#  --target successor \
+#  --input_layers rgb \
+#  --dataset_name $DATASET_NAME/tracklets_raw &
 #
 #sleep 1
 #
-## TrackletNet tracklet-supervised
-#CUDA_VISIBLE_DEVICES=2,3 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \
+## SuccNet tracklets_joint supervised (no TrackletNet)
+#CUDA_VISIBLE_DEVICES=1 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \
 #  --config cfg-rlgpu.yaml \
-#  --target full \
-#  --dataset_name $DATASET_NAME/tracklets_raw
+#  --target successor \
+#  --input_layers rgb \
+#  --dataset_name $DATASET_NAME/tracklets_joint &
 #
 #
+#sleep 1
 #
+## SuccNet tracklets_joint supervised (TrackletNet D)
+#CUDA_VISIBLE_DEVICES=2 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \
+#  --config cfg-rlgpu.yaml \
+#  --target successor \
+#  --input_layers rgb+drivable \
+#  --full-checkpoint checkpoints/civilized-bothan-187/e-150.pth \
+#  --dataset_name $DATASET_NAME/tracklets_joint &
+#
+#sleep 1
+#
+## SuccNet tracklets_joint supervised (TrackletNet D + A)
+#CUDA_VISIBLE_DEVICES=3 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \
+#  --config cfg-rlgpu.yaml \
+#  --target successor \
+#  --input_layers rgb+drivable+angles \
+#  --full-checkpoint checkpoints/civilized-bothan-187/e-150.pth \
+#  --dataset_name $DATASET_NAME/tracklets_joint
 
-# SuccNet tracklets_raw supervised (no TrackletNet)
-CUDA_VISIBLE_DEVICES=0 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \
-  --config cfg-rlgpu.yaml \
-  --target successor \
-  --input_layers rgb \
-  --dataset_name $DATASET_NAME/tracklets_raw &
-
-sleep 1
-
-# SuccNet tracklets_joint supervised (no TrackletNet)
-CUDA_VISIBLE_DEVICES=1 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \
-  --config cfg-rlgpu.yaml \
-  --target successor \
-  --input_layers rgb \
-  --dataset_name $DATASET_NAME/tracklets_joint &
-
-sleep 1
-
-# SuccNet tracklets_joint supervised (TrackletNet D)
-CUDA_VISIBLE_DEVICES=2 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \
-  --config cfg-rlgpu.yaml \
-  --target successor \
-  --input_layers rgb+drivable \
-  --full-checkpoint checkpoints/civilized-bothan-187/e-150.pth \
-  --dataset_name $DATASET_NAME/tracklets_joint &
-
-sleep 1
-
-# SuccNet tracklets_joint supervised (TrackletNet D + A)
-CUDA_VISIBLE_DEVICES=3 WANDB_API_KEY=$WANDB_API_KEY ~/zuern/geometric/bin/python train_regressor_pos_query.py \
-  --config cfg-rlgpu.yaml \
-  --target successor \
-  --input_layers rgb+drivable+angles \
-  --full-checkpoint checkpoints/civilized-bothan-187/e-150.pth \
-  --dataset_name $DATASET_NAME/tracklets_joint
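
Note on training-rlgpu.sh: the script keeps a W&B API key in version control. A safer pattern is to export the key in the launching shell only and have the Python entry point read it from the environment; a sketch of that check (this assumes train_regressor_pos_query.py can be adapted, which this patch does not do):

    import os

    # Read the key from the environment instead of committing it;
    # fail early and loudly if it is missing.
    wandb_api_key = os.environ.get("WANDB_API_KEY")
    if not wandb_api_key:
        raise RuntimeError("WANDB_API_KEY is not set; export it before launching training.")
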