#include "torch/extension.h"
#include "torch/script.h"

class TrajectoryImitationLibtorchInference : public ModelInference {
 public:
  // Constructor.
  TrajectoryImitationLibtorchInference(
      const LearningModelInferenceTaskConfig& config);

  // Destructor.
  virtual ~TrajectoryImitationLibtorchInference() = default;

  // Get the name of model inference.
  std::string GetName() override { return "TRAJECTORY_IMITATION_INFERENCE"; }

  // Load a learned model.
  bool LoadModel() override;

  // Run inference with a learned model.
  bool DoInference(LearningDataFrame* const learning_data_frame) override;

 private:
  // Load the CNN & LSTM model and run inference with the CNN or CNN & LSTM variant.
  bool LoadCNNLSTMModel();
  bool DoCNNMODELInference(LearningDataFrame* const learning_data_frame);
  bool DoCNNLSTMMODELInference(LearningDataFrame* const learning_data_frame);

  // Convert the raw model output tensor back into the learning data frame.
  void output_postprocessing(const at::Tensor& torch_output_tensor,
                             LearningDataFrame* const learning_data_frame);

  torch::jit::script::Module model_;
  torch::Device device_;
};
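The header above only declares the interface. As a rough, standalone sketch of the LibTorch calls that a LoadModel()/DoInference() pair like this typically wraps, consider the following minimal example; the model path, input tensor shape, and error handling are placeholder assumptions, not Apollo's actual implementation.

// Minimal LibTorch sketch, independent of the classes above: it only shows the
// torch::jit calls that loading and running a TorchScript model involve.
#include <iostream>
#include <vector>

#include "torch/script.h"

int main() {
  // Pick the device: CUDA if available, otherwise CPU.
  torch::Device device(torch::cuda::is_available() ? torch::kCUDA : torch::kCPU);

  // "Load a learned model": deserialize a TorchScript module exported from Python.
  torch::jit::script::Module model;
  try {
    model = torch::jit::load("/tmp/traj_model.pt", device);  // placeholder path
  } catch (const c10::Error& e) {
    std::cerr << "failed to load model: " << e.what() << std::endl;
    return 1;
  }
  model.eval();

  // "Inference a learned model": build the inputs and call forward().
  std::vector<torch::jit::IValue> inputs;
  inputs.emplace_back(torch::rand({1, 12, 200, 200}, device));  // placeholder input

  torch::NoGradGuard no_grad;
  at::Tensor output = model.forward(inputs).toTensor();

  // Postprocessing would read trajectory points out of this tensor.
  std::cout << "output sizes: " << output.sizes() << std::endl;
  return 0;
}

In the class above, the loaded module and the chosen device would plausibly live in the model_ and device_ members.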
TrajectoryImitationLibtorchInference implements the model inference base class ModelInference declared in model_inference.h: the constructor takes the LearningModelInferenceTaskConfig for the task, the destructor is defaulted, GetName() reports the inference name, LoadModel() loads a learned model, and DoInference() runs it on a LearningDataFrame.
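For context, the overrides above imply a small abstract interface in model_inference.h along the following lines. This is a hypothetical reading; the config member name and how the base class stores it are assumptions.

#include <string>

class LearningModelInferenceTaskConfig;  // stands in for the real config type
class LearningDataFrame;                 // stands in for the real data frame type

// Hypothetical sketch of the model inference base interface implied by the
// overrides in the header above.
class ModelInference {
 public:
  explicit ModelInference(const LearningModelInferenceTaskConfig& config)
      : config_(config) {}
  virtual ~ModelInference() = default;

  virtual std::string GetName() = 0;
  virtual bool LoadModel() = 0;
  virtual bool DoInference(LearningDataFrame* const learning_data_frame) = 0;

 protected:
  // Assumed: the base class keeps a reference to the task config for derived classes.
  const LearningModelInferenceTaskConfig& config_;
};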