Skip to content
This repository was archived by the owner on Jan 24, 2024. It is now read-only.
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions benchmark/tool/C/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,8 @@ include(FindPaddle)
aux_source_directory(. SRC_LIST)
add_executable(${PROJECT_NAME} ${SRC_LIST})

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")

set(CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS)
set(CMAKE_SHARED_LIBRARY_LINK_C_FLAGS)
if(ANDROID_ABI)
Expand Down
196 changes: 116 additions & 80 deletions benchmark/tool/C/inference.cc
Original file line number Diff line number Diff line change
Expand Up @@ -12,18 +12,12 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <paddle/capi.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <iostream>

#include <paddle/capi.h>
#include <stdlib.h>

// Accumulate paddle_error codes with bit-wise OR so several C-API calls
// can fold their statuses into a single error variable.
inline paddle_error& operator|=(paddle_error& a, paddle_error b) {
  const int merged = static_cast<int>(a) | static_cast<int>(b);
  a = static_cast<paddle_error>(merged);
  return a;
}

class Timer {
public:
Timer(std::string name, int iter = 1) : name_(name), iter_(iter) {
Expand Down Expand Up @@ -53,119 +47,161 @@ void read_file(const char* file, void** buf, long* size) {
*buf = malloc(*size);
fread(*buf, 1, *size, fp);
}
fclose(fp);
} else {
std::cerr << "Error: cannot open " << file << " ." << std::endl;
}
fclose(fp);
}

int main(int argc, char* argv[]) {
// parse command line arguments
std::string predict_config;
std::string predict_model;
std::string merged_model;
int input_size;
for (int i = 1; i < argc; ++i) {
if (std::string(argv[i]) == "--predict_config") {
predict_config = std::string(argv[++i]);
} else if (std::string(argv[i]) == "--predict_model") {
predict_model = std::string(argv[++i]);
} else if (std::string(argv[i]) == "--merged_model") {
merged_model = std::string(argv[++i]);
} else if (std::string(argv[i]) == "--input_size") {
input_size = atoi(argv[++i]);
}
// Translate a paddle_error code into a short human-readable description.
// Unknown codes (including kPD_NO_ERROR) map to the empty string.
const char* error_string(paddle_error err) {
  switch (err) {
    case kPD_NULLPTR:         return "nullptr error";
    case kPD_OUT_OF_RANGE:    return "out of range error";
    case kPD_PROTOBUF_ERROR:  return "protobuf error";
    case kPD_NOT_SUPPORTED:   return "not supported error";
    case kPD_UNDEFINED_ERROR: return "undefined error";
    default:                  return "";
  }
}

{
Timer time("init paddle");
if (paddle_init(0, NULL) != kPD_NO_ERROR) {
std::cout << "paddle init error!" << std::endl;
}
}
// Run a paddle C-API call and, if it does not return kPD_NO_ERROR, print
// the error description plus the failing statement text to stderr and
// terminate the process with the error code as the exit status.
#define PD_CHECK(stmt) \
  do { \
    paddle_error __err__ = stmt; \
    if (__err__ != kPD_NO_ERROR) { \
      const char* str = error_string(__err__); \
      std::cerr << str << "(" << __err__ << ") in " #stmt "\n"; \
      exit(__err__); \
    } \
  } while (0)

// Initialize the PaddlePaddle runtime (no command-line flags forwarded),
// reporting how long initialization takes; aborts via PD_CHECK on failure.
void init_paddle() {
  Timer init_timer("init paddle");
  PD_CHECK(paddle_init(0, NULL));
}

// Create a gradient machine for inference.
//
// Two mutually exclusive ways to build it:
//   - merged_model_path: a single binary bundling config and parameters;
//   - config_path (+ optional params_dir): a protobin config generated by
//     `convert_protobin.sh`; parameters are loaded from params_dir, or
//     randomized when params_dir is empty (enough for benchmarking).
// Exits the process on any failure.
paddle_gradient_machine init(std::string merged_model_path,
                             std::string config_path,
                             std::string params_dir) {
  paddle_gradient_machine machine = nullptr;
  if (!merged_model_path.empty()) {
    Timer time("create gradient_machine from merged model");
    long size = 0;
    void* merged_model = NULL;
    read_file(merged_model_path.c_str(), &merged_model, &size);
    // read_file leaves the buffer NULL when the file cannot be opened;
    // bail out instead of handing a NULL blob to the C API.
    if (merged_model == NULL) {
      std::cerr << "Error: failed to read " << merged_model_path << " ."
                << std::endl;
      exit(-1);
    }
    PD_CHECK(paddle_gradient_machine_create_for_inference_with_parameters(
        &machine, merged_model, size));
    free(merged_model);
  } else {
    // Reading config binary file. It is generated by `convert_protobin.sh`.
    if (config_path.empty()) {
      std::cerr << "Both merged_model and predict_config are null. "
                << "Please specify one of them." << std::endl;
      exit(-1);
    }
    {
      long size = 0;
      void* config = NULL;
      Timer time("create gradient_machine with model config");
      read_file(config_path.c_str(), &config, &size);
      if (config == NULL) {
        std::cerr << "Error: failed to read " << config_path << " ."
                  << std::endl;
        exit(-1);
      }
      PD_CHECK(paddle_gradient_machine_create_for_inference(
          &machine, config, (int)size));
      free(config);
    }

    if (params_dir.empty()) {
      // No parameter directory: random weights suffice for timing the
      // forward pass.
      Timer time("randomize parameters");
      PD_CHECK(paddle_gradient_machine_randomize_param(machine));
    } else {
      Timer time("load parameters from disk");
      PD_CHECK(paddle_gradient_machine_load_parameter_from_disk(
          machine, params_dir.c_str()));
    }
  }
  return machine;
}

// Create input matrix.
// Run inference on one random sample of `input_size` floats: a warm-up
// forward pass followed by 20 timed passes. The machine itself is released
// by the caller (see release()).
void infer(paddle_gradient_machine machine, int input_size) {
  // Input arguments holding exactly one slot.
  paddle_arguments in_args = paddle_arguments_create_none();
  PD_CHECK(paddle_arguments_resize(in_args, 1));

  // Create input matrix: one sample row of `input_size` values on CPU.
  paddle_matrix mat = paddle_matrix_create(/* sample_num */ 1,
                                           /* size */ input_size,
                                           /* useGPU */ false);
  PD_CHECK(paddle_arguments_set_value(in_args, 0, mat));

  // Fill the single row with pseudo-random values in [0, 1].
  paddle_real* array;
  PD_CHECK(paddle_matrix_get_row(mat, 0, &array));
  srand(time(0));
  for (int i = 0; i < input_size; ++i) {
    array[i] = rand() / ((float)RAND_MAX);
  }

  paddle_arguments out_args = paddle_arguments_create_none();

  // Warm-up pass, excluded from the timing below.
  PD_CHECK(paddle_gradient_machine_forward(machine,
                                           in_args,
                                           out_args,
                                           /* isTrain */ false));

  {
    Timer time("forward time", 20);
    for (int i = 0; i < 20; i++) {
      // Check every timed pass too: a silently failing forward would make
      // the reported time meaningless.
      PD_CHECK(paddle_gradient_machine_forward(machine,
                                               in_args,
                                               out_args,
                                               /* isTrain */ false));
    }
  }

  paddle_arguments_destroy(out_args);
  paddle_matrix_destroy(mat);
  paddle_arguments_destroy(in_args);
}

// Destroy the gradient machine, aborting via PD_CHECK if the C API
// reports a failure.
void release(paddle_gradient_machine machine) {
  PD_CHECK(paddle_gradient_machine_destroy(machine));
}

// Benchmark driver. Flags (each expects a value):
//   --merged_model <file>   single merged model binary
//   --config_path  <file>   protobin model config
//   --params_dir   <dir>    parameter directory
//   --input_size   <int>    width of the random input vector
int main(int argc, char* argv[]) {
  // parse command line arguments
  std::string merged_model_path;
  std::string config_path;
  std::string params_dir;
  // Default to 0 so a missing --input_size flag does not leave the value
  // indeterminate (reading it would be undefined behavior).
  int input_size = 0;
  for (int i = 1; i < argc; ++i) {
    const std::string flag(argv[i]);
    // Only consume argv[i + 1] when it actually exists; a recognized flag
    // given as the last argument is ignored instead of reading past argc.
    const bool has_value = (i + 1 < argc);
    if (flag == "--merged_model" && has_value) {
      merged_model_path = std::string(argv[++i]);
    } else if (flag == "--config_path" && has_value) {
      config_path = std::string(argv[++i]);
    } else if (flag == "--params_dir" && has_value) {
      params_dir = std::string(argv[++i]);
    } else if (flag == "--input_size" && has_value) {
      input_size = atoi(argv[++i]);
    }
  }

  // Initialize the environment of PaddlePaddle.
  init_paddle();

  // Create a gradient machine for inference.
  paddle_gradient_machine machine =
      init(merged_model_path, config_path, params_dir);

  // Do inference with random input.
  infer(machine, input_size);

  // Release the resource.
  release(machine);

  return 0;
}