Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 8 additions & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
cmake_minimum_required(VERSION 3.19...3.30)
project(tokenizers_cpp C CXX)

# Standard CMake switch: when ON, tokenizers_cpp is built as a shared library
# (and the C-style export shims in src/static_exports.cc are compiled in);
# when OFF (default) a static library is produced.
# NOTE(review): a shared build links the statically-built submodule deps into a
# .so on Linux, which requires them to be compiled as position-independent
# code — consider setting CMAKE_POSITION_INDEPENDENT_CODE ON; TODO confirm.
option(BUILD_SHARED_LIBS "Build tokenizers_cpp as a shared library" OFF)

# Project-wide C++ standard: strict C++17 (no GNU extensions), required.
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)
Expand Down Expand Up @@ -157,7 +159,12 @@ set(
src/huggingface_tokenizer.cc
src/rwkv_world_tokenizer.cc
)
add_library(tokenizers_cpp STATIC ${TOKENIZER_CPP_SRCS})

# Build tokenizers_cpp honoring BUILD_SHARED_LIBS: add_library() with no
# STATIC/SHARED keyword picks the library kind from that variable. The shared
# build additionally compiles the C-style export shims in src/static_exports.cc.
set(tokenizers_cpp_extra_srcs "")
if(BUILD_SHARED_LIBS)
  set(tokenizers_cpp_extra_srcs src/static_exports.cc)
endif()
add_library(tokenizers_cpp ${TOKENIZER_CPP_SRCS} ${tokenizers_cpp_extra_srcs})
target_include_directories(tokenizers_cpp PRIVATE sentencepiece/src)
target_include_directories(tokenizers_cpp PRIVATE msgpack/include)
target_include_directories(tokenizers_cpp PUBLIC ${TOKENIZERS_CPP_INCLUDE})
Expand Down
23 changes: 23 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,29 @@ You also need to turn on `c++17` support.

See [example](example) folder for an example CMake project.

### Build it as a shared library
You can also build this library as a standalone shared library.

1. Initialize all submodules:
```sh
git submodule update --init --recursive
```
2. Create and enter the build directory:
```sh
mkdir -p build
cd build
```
3. Generate the CMake configuration:
```sh
cmake .. -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=ON
```
4. Build the project:
```sh
cmake --build . --config Release
```

When using the shared library, you can call the exported functions defined in `src/static_exports.cc`.

### Example Code

```c++
Expand Down
65 changes: 65 additions & 0 deletions src/static_exports.cc
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
#include "tokenizers_cpp.h"

// export LoadBlobJsonAndEncode(const std::string &, const std::string &, std::vector<int32_t> &)
extern "C" __declspec(dllexport) void LoadBlobJsonAndEncode(const std::string& json_blob, const std::string& text, std::vector<int32_t>& token_ids)
{
auto tokenizer = tokenizers::Tokenizer::FromBlobJSON(json_blob);
token_ids = tokenizer->Encode(text);
return;
}

// export LoadBlobJsonAndEncodeBatch(const std::string &, const std::vector<std::string> &, std::vector<std::vector<int32_t>> &)
extern "C" __declspec(dllexport) void LoadBlobJsonAndEncodeBatch(const std::string& json_blob, const std::vector<std::string>& texts, std::vector<std::vector<int32_t>>& token_ids_batch)
{
auto tokenizer = tokenizers::Tokenizer::FromBlobJSON(json_blob);
token_ids_batch = tokenizer->EncodeBatch(texts);
return;
}

// export LoadBlobSentencePieceAndEncode(const std::string &, const std::string &, std::vector<int32_t> &)
extern "C" __declspec(dllexport) void LoadBlobSentencePieceAndEncode(const std::string& model_blob, const std::string& text, std::vector<int32_t>& token_ids)
{
auto tokenizer = tokenizers::Tokenizer::FromBlobSentencePiece(model_blob);
token_ids = tokenizer->Encode(text);
return;
}

// export LoadBlobSentencePieceAndEncodeBatch(const std::string &, const std::vector<std::string> &, std::vector<std::vector<int32_t>> &)
extern "C" __declspec(dllexport) void LoadBlobSentencePieceAndEncodeBatch(const std::string& model_blob, const std::vector<std::string>& texts, std::vector<std::vector<int32_t>>& token_ids_batch)
{
auto tokenizer = tokenizers::Tokenizer::FromBlobSentencePiece(model_blob);
token_ids_batch = tokenizer->EncodeBatch(texts);
return;
}

// export LoadBlobRWKVWorldAndEncode(const std::string &, const std::string &, std::vector<int32_t> &)
extern "C" __declspec(dllexport) void LoadBlobRWKVWorldAndEncode(const std::string& model_blob, const std::string& text, std::vector<int32_t>& token_ids)
{
auto tokenizer = tokenizers::Tokenizer::FromBlobRWKVWorld(model_blob);
token_ids = tokenizer->Encode(text);
return;
}

// export LoadBlobRWKVWorldAndEncodeBatch(const std::string &, const std::vector<std::string> &, std::vector<std::vector<int32_t>> &)
extern "C" __declspec(dllexport) void LoadBlobRWKVWorldAndEncodeBatch(const std::string& model_blob, const std::vector<std::string>& texts, std::vector<std::vector<int32_t>>& token_ids_batch)
{
auto tokenizer = tokenizers::Tokenizer::FromBlobRWKVWorld(model_blob);
token_ids_batch = tokenizer->EncodeBatch(texts);
return;
}

// export LoadBlobByteLevelBPEAndEncode(const std::string &, const std::string &, const std::string &, const std::string &, std::vector<int32_t> &)
extern "C" __declspec(dllexport) void LoadBlobByteLevelBPEAndEncode(const std::string& vocab_blob, const std::string& merges_blob, const std::string& added_tokens, const std::string& text, std::vector<int32_t>& token_ids)
{
auto tokenizer = tokenizers::Tokenizer::FromBlobByteLevelBPE(vocab_blob, merges_blob, added_tokens);
token_ids = tokenizer->Encode(text);
return;
}

// export LoadBlobByteLevelBPEAndEncodeBatch(const std::string &, const std::string &, const std::string &, const std::vector<std::string> &, std::vector<std::vector<int32_t>> &)
extern "C" __declspec(dllexport) void LoadBlobByteLevelBPEAndEncodeBatch(const std::string& vocab_blob, const std::string& merges_blob, const std::string& added_tokens, const std::vector<std::string>& texts, std::vector<std::vector<int32_t>>& token_ids_batch)
{
auto tokenizer = tokenizers::Tokenizer::FromBlobByteLevelBPE(vocab_blob, merges_blob, added_tokens);
token_ids_batch = tokenizer->EncodeBatch(texts);
return;
}