
Commit

updated demo
mhubii committed Aug 31, 2023
1 parent 989514e commit b4828ae
Showing 2 changed files with 5 additions and 20 deletions.
13 changes: 4 additions & 9 deletions whisper_demos/whisper_demos/whisper_on_key.py
@@ -5,6 +5,7 @@
 from rclpy.node import Node
 from rclpy.task import Future
 from whisper_msgs.action import Inference
+from whisper_msgs.action._inference import Inference_FeedbackMessage
 
 
 class WhisperOnKey(Node):
@@ -13,7 +14,6 @@ def __init__(self, node_name: str) -> None:
 
         # whisper
         self.whisper_client = ActionClient(self, Inference, "/whisper/inference")
-        self.feedback = Inference.Feedback()
 
         while not self.whisper_client.wait_for_server(1):
             self.get_logger().warn(
@@ -28,7 +28,7 @@ def __init__(self, node_name: str) -> None:
 
         self.get_logger().info(self.info_string())
 
-    def on_key(self, key) -> None:
+    def on_key(self, key: Key) -> None:
         if key == Key.esc:
             self.key_listener.stop()
             rclpy.shutdown()
@@ -65,13 +65,8 @@ def on_done(self, future: Future) -> None:
         result: Inference.Result = future.result().result
         self.get_logger().info(f"Result: {result.text}")
 
-    def on_feedback(self, feedback_msg) -> None:
-        prefix = ""
-        if feedback_msg.feedback.batch_idx != self.feedback.batch_idx:
-            prefix = "\n"
-        self.feedback = feedback_msg.feedback
-        # print(f"{prefix}{self.feedback.text}", end="\r")
-        self.get_logger().info(f"{self.feedback.text}")
+    def on_feedback(self, feedback_msg: Inference_FeedbackMessage) -> None:
+        self.get_logger().info(f"{feedback_msg.feedback.text}")
 
     def info_string(self) -> str:
         return (
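For orientation, here is a minimal sketch of how the simplified callbacks above are typically wired to the rclpy action client. It is not part of this commit: only Inference, /whisper/inference, on_feedback, and on_done appear in the diff; the node name, the send_goal helper, and the goal-acceptance callback are illustrative assumptions.

import rclpy
from rclpy.action import ActionClient
from rclpy.node import Node
from rclpy.task import Future
from whisper_msgs.action import Inference
from whisper_msgs.action._inference import Inference_FeedbackMessage


class InferenceClientSketch(Node):
    def __init__(self) -> None:
        super().__init__("inference_client_sketch")  # hypothetical node name
        self.whisper_client = ActionClient(self, Inference, "/whisper/inference")
        self.whisper_client.wait_for_server()

    def send_goal(self) -> None:
        goal = Inference.Goal()  # goal.max_duration bounds the server-side loop
        future = self.whisper_client.send_goal_async(
            goal, feedback_callback=self.on_feedback
        )
        future.add_done_callback(self.on_goal_accepted)

    def on_goal_accepted(self, future: Future) -> None:
        goal_handle = future.result()
        if not goal_handle.accepted:
            self.get_logger().error("Goal rejected.")
            return
        goal_handle.get_result_async().add_done_callback(self.on_done)

    def on_feedback(self, feedback_msg: Inference_FeedbackMessage) -> None:
        # mirrors the simplified callback in the diff: log the feedback text only
        self.get_logger().info(f"{feedback_msg.feedback.text}")

    def on_done(self, future: Future) -> None:
        result: Inference.Result = future.result().result
        self.get_logger().info(f"Result: {result.text}")


def main() -> None:
    rclpy.init()
    node = InferenceClientSketch()
    node.send_goal()
    rclpy.spin(node)


if __name__ == "__main__":
    main()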
12 changes: 1 addition & 11 deletions whisper_server/src/inference_node.cpp
@@ -113,13 +113,6 @@ void InferenceNode::on_inference_accepted_(const std::shared_ptr<GoalHandleInfer
 
   while (rclcpp::ok() &&
          node_ptr_->now() - loop_start_time < goal_handle->get_goal()->max_duration) {
-    // if (goal_handle->get_goal()->max_duration.sec != 0 && // run until goal is canceled
-    //     goal_handle->get_goal()->max_duration.nanosec != 0 &&
-    //     node_ptr_->now() - loop_start_time < goal_handle->get_goal()->max_duration) {
-    //   RCLCPP_INFO(node_ptr_->get_logger(), "Exiting inference on time limit.");
-    //   break;
-    // }
-
     // run inference
     auto text = inference_(batched_buffer_.dequeue());
 
@@ -128,14 +121,11 @@ void InferenceNode::on_inference_accepted_(const std::shared_ptr<GoalHandleInfer
       result->text.push_back(feedback->text);
     }
     feedback->text = text;
-    feedback->batch_idx = batched_buffer_.batch_idx();
     goal_handle->publish_feedback(feedback);
   }
   running_inference_ = false;
 
-  // goal_handle->canceled
-  // goal_handle->is_canceling
-  // goal_handle->publish_feedback
 
   goal_handle->succeed(result);
 }

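Side note on the server loop above: with the commented-out cancellation branch deleted, inference now runs until the goal's max_duration elapses, so a client bounds the run time through that goal field. A hedged sketch of setting it follows; the ten-second value is arbitrary, and max_duration being a builtin_interfaces Duration is inferred from the .sec/.nanosec accesses in the removed comments.

from builtin_interfaces.msg import Duration
from whisper_msgs.action import Inference

goal = Inference.Goal()
goal.max_duration = Duration(sec=10)  # server-side inference loop exits after ~10 s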
