I'm trying to load and use a model exported from Python in C++ using libtorch. The program compiles correctly, but I get "Illegal instruction (core dumped)" when calling forward on the input.
// Runs a single dummy forward pass through the first loaded module.
// `module_type` is declared elsewhere in the file — TODO confirm whether it is
// torch::jit::script::Module by value or std::shared_ptr<...>; that decides
// whether `.forward` or `->forward` is correct below.
void test(vector<module_type>& model){
//pseudo input: a 1x3x224x224 all-ones tensor (typical ImageNet-sized input)
vector<torch::jit::IValue> inputs;
inputs.push_back(torch::ones({1, 3, 224, 224}));
//ERROR IS HERE: "Illegal instruction (core dumped)" is raised on this call.
// NOTE(review): if module_type is a shared_ptr, this should be
// model[0]->forward(inputs) — verify against the typedef/using declaration.
at::Tensor output = model[0].forward(inputs).toTensor();
cout << output << endl;
}
// Entry point: expects exactly one command-line argument, the path to the
// serialized TorchScript model. Returns -1 on usage error or load failure.
int main(int argc, char *argv[]) {
    // Guard clause: bail out early when no model path was supplied.
    if (argc != 2) {
        cerr << "no path of model is given" << endl;
        return -1;
    }
    cout << argv[1] << endl;

    // torch::jit::load throws (e.g. c10::Error, derived from std::exception)
    // when the file is missing or not a valid TorchScript archive; without
    // this handler the program would terminate with an uncaught exception.
    try {
        module_type module = torch::jit::load(argv[1]);
        vector<module_type> modul;
        modul.push_back(module);
        test(modul);
    } catch (const std::exception& e) {
        cerr << "failed to load model from '" << argv[1] << "': " << e.what() << endl;
        return -1;
    }
    return 0;
}
cmake_minimum_required(VERSION 3.0 FATAL_ERROR)
project(main)

# Locates TorchConfig.cmake; pass -DCMAKE_PREFIX_PATH=/path/to/libtorch.
find_package(Torch REQUIRED)

add_executable(main main.cpp)
target_link_libraries(main "${TORCH_LIBRARIES}")

# Request C++11 and make it a hard requirement so CMake cannot silently
# fall back to an older standard if the compiler lacks C++11 support.
# NOTE(review): newer libtorch releases require C++14 or later — bump this
# value to match the libtorch version actually in use.
set_property(TARGET main PROPERTY CXX_STANDARD 11)
set_property(TARGET main PROPERTY CXX_STANDARD_REQUIRED ON)
1) The return type of torch::jit::load is std::shared_ptr<torch::jit::script::Module>, so your code should rather be: at::Tensor output = model[0]->forward(inputs).toTensor();
2) It is possible that for some reason the export of your Python model failed, but it's hard to tell without seeing the actual Python code you used. To see how many methods are available, try:
// Load the serialized module and count its script methods; a count of 0
// means the export from Python produced no callable methods (no forward).
auto module = torch::jit::load(argv[1]);
size_t number_of_methods = module->get_methods().size();
Basically, if number_of_methods is 0, you have a problem: the serialized object contains no methods, and the issue comes from your Python export code. Otherwise, the forward method should be available.