From b4828ae39105fe61112c7676bef7fe98be041094 Mon Sep 17 00:00:00 2001
From: mhubii
Date: Thu, 31 Aug 2023 17:04:07 +0100
Subject: [PATCH] updated demo

Simplify the whisper_on_key demo: log feedback text directly instead of
tracking batch_idx on the client, and add type hints to the key and
feedback callbacks. On the server, publish batch_idx with each feedback
message and drop commented-out dead code from the inference loop.
---
 whisper_demos/whisper_demos/whisper_on_key.py | 13 ++++---------
 whisper_server/src/inference_node.cpp         | 12 +-----------
 2 files changed, 5 insertions(+), 20 deletions(-)

diff --git a/whisper_demos/whisper_demos/whisper_on_key.py b/whisper_demos/whisper_demos/whisper_on_key.py
index 2ebcc39..2d4312e 100644
--- a/whisper_demos/whisper_demos/whisper_on_key.py
+++ b/whisper_demos/whisper_demos/whisper_on_key.py
@@ -5,6 +5,7 @@ from rclpy.node import Node
 from rclpy.task import Future
 
 from whisper_msgs.action import Inference
+from whisper_msgs.action._inference import Inference_FeedbackMessage
 
 
 class WhisperOnKey(Node):
@@ -13,7 +14,6 @@ def __init__(self, node_name: str) -> None:
 
         # whisper
         self.whisper_client = ActionClient(self, Inference, "/whisper/inference")
-        self.feedback = Inference.Feedback()
 
         while not self.whisper_client.wait_for_server(1):
             self.get_logger().warn(
@@ -28,7 +28,7 @@ def __init__(self, node_name: str) -> None:
 
         self.get_logger().info(self.info_string())
 
-    def on_key(self, key) -> None:
+    def on_key(self, key: Key) -> None:
         if key == Key.esc:
             self.key_listener.stop()
             rclpy.shutdown()
@@ -65,13 +65,8 @@ def on_done(self, future: Future) -> None:
         result: Inference.Result = future.result().result
         self.get_logger().info(f"Result: {result.text}")
 
-    def on_feedback(self, feedback_msg) -> None:
-        prefix = ""
-        if feedback_msg.feedback.batch_idx != self.feedback.batch_idx:
-            prefix = "\n"
-        self.feedback = feedback_msg.feedback
-        # print(f"{prefix}{self.feedback.text}", end="\r")
-        self.get_logger().info(f"{self.feedback.text}")
+    def on_feedback(self, feedback_msg: Inference_FeedbackMessage) -> None:
+        self.get_logger().info(f"{feedback_msg.feedback.text}")
 
     def info_string(self) -> str:
         return (
diff --git a/whisper_server/src/inference_node.cpp b/whisper_server/src/inference_node.cpp
index 2790bd7..93b520b 100644
--- a/whisper_server/src/inference_node.cpp
+++ b/whisper_server/src/inference_node.cpp
@@ -113,13 +113,6 @@ void InferenceNode::on_inference_accepted_(const std::shared_ptr<GoalHandleInference> goal_handle) {
   running_inference_ = true;
   auto loop_start_time = node_ptr_->now();
   while (rclcpp::ok() && node_ptr_->now() - loop_start_time < goal_handle->get_goal()->max_duration) {
-    // if (goal_handle->get_goal()->max_duration.sec != 0 && // run until goal is canceled
-    //     goal_handle->get_goal()->max_duration.nanosec != 0 &&
-    //     node_ptr_->now() - loop_start_time < goal_handle->get_goal()->max_duration) {
-    //   RCLCPP_INFO(node_ptr_->get_logger(), "Exiting inference on time limit.");
-    //   break;
-    // }
-
     // run inference
     auto text = inference_(batched_buffer_.dequeue());
 
@@ -128,14 +121,11 @@ void InferenceNode::on_inference_accepted_(const std::shared_ptr<GoalHandleInference> goal_handle) {
     if (feedback->batch_idx != batched_buffer_.batch_idx()) {
       result->text.push_back(feedback->text);
     }
     feedback->text = text;
+    feedback->batch_idx = batched_buffer_.batch_idx();
     goal_handle->publish_feedback(feedback);
   }
 
   running_inference_ = false;
-  // goal_handle->canceled
-  // goal_handle->is_canceling
-  // goal_handle->publish_feedback
-
   goal_handle->succeed(result);
 }
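
Note (not part of the patch): a minimal client-side sketch of how the
reworked feedback path is exercised, assuming a running
/whisper/inference action server and whisper_msgs on the Python path;
the node name and the 10 s max_duration are illustrative only:

    import rclpy
    from rclpy.action import ActionClient
    from rclpy.node import Node

    from whisper_msgs.action import Inference
    from whisper_msgs.action._inference import Inference_FeedbackMessage


    def main() -> None:
        rclpy.init()
        node = Node("whisper_feedback_sketch")
        client = ActionClient(node, Inference, "/whisper/inference")
        client.wait_for_server()

        goal = Inference.Goal()
        goal.max_duration.sec = 10  # server loops until this limit is reached

        def on_feedback(msg: Inference_FeedbackMessage) -> None:
            # batch_idx is now populated server-side by this patch
            node.get_logger().info(f"[batch {msg.feedback.batch_idx}] {msg.feedback.text}")

        # send the goal and stream feedback while inference runs
        goal_future = client.send_goal_async(goal, feedback_callback=on_feedback)
        rclpy.spin_until_future_complete(node, goal_future)

        # wait for the final result (result.text is a list of batch strings)
        result_future = goal_future.result().get_result_async()
        rclpy.spin_until_future_complete(node, result_future)
        node.get_logger().info(f"Result: {result_future.result().result.text}")
        rclpy.shutdown()


    if __name__ == "__main__":
        main()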