Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 45 additions & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,51 @@ if(${Boost_FOUND})
set(COBS_LINK_LIBRARIES ${Boost_LIBRARIES} ${COBS_LINK_LIBRARIES})
endif()

set(COBS_LINK_LIBRARIES stdc++fs ${COBS_LINK_LIBRARIES})
include(CheckCXXSourceRuns)

# Detect which extra link library (if any) C++17's
# std::experimental::filesystem needs with this toolchain:
#   1. none       — newer libstdc++/libc++ ship it in the main library
#   2. c++fs      — libc++'s separate filesystem library
#   3. stdc++fs   — libstdc++'s separate filesystem library
# The winning library is prepended to COBS_LINK_LIBRARIES; if no variant
# runs, configuration aborts.
set(COBS_FILESYSTEM_TEST_SOURCE "
    #include <iostream>
    #include <experimental/filesystem>
    int main() {
        std::cout << std::experimental::filesystem::temp_directory_path();
        return 0;
    }
")

set(CMAKE_REQUIRED_FLAGS "-std=c++17")
check_cxx_source_runs("${COBS_FILESYSTEM_TEST_SOURCE}" CPPNOFS)
if(NOT CPPNOFS)
  set(CMAKE_REQUIRED_LIBRARIES "c++fs")
  check_cxx_source_runs("${COBS_FILESYSTEM_TEST_SOURCE}" CPPFS)
  if(CPPFS)
    set(COBS_LINK_LIBRARIES c++fs ${COBS_LINK_LIBRARIES})
  else()
    set(CMAKE_REQUIRED_LIBRARIES "stdc++fs")
    check_cxx_source_runs("${COBS_FILESYSTEM_TEST_SOURCE}" STDCPPFS)
    if(STDCPPFS)
      set(COBS_LINK_LIBRARIES stdc++fs ${COBS_LINK_LIBRARIES})
    else()
      message(FATAL_ERROR "std::experimental::filesystem not found")
    endif()
  endif()
endif()
# Clear check-scoped state on EVERY path (the original leaked
# CMAKE_REQUIRED_FLAGS into later checks when the first probe succeeded).
unset(CMAKE_REQUIRED_FLAGS)
unset(CMAKE_REQUIRED_LIBRARIES)
unset(COBS_FILESYSTEM_TEST_SOURCE)

### use TLX ###

Expand Down
6 changes: 3 additions & 3 deletions cobs/query/classic_search.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -111,12 +111,12 @@ void ClassicSearch::compute_counts(
const uint8_t* rows, size_t size, size_t buffer_size)
{
#if __SSE2__
auto expansion_128 = reinterpret_cast<const __m128i_u*>(s_expansion_128);
auto expansion_128 = reinterpret_cast<const __m128i*>(s_expansion_128);
#endif
uint64_t num_hashes = index_file_.num_hashes();

#if __SSE2__
auto counts_128 = reinterpret_cast<__m128i_u*>(scores);
auto counts_128 = reinterpret_cast<__m128i*>(scores);
#else
auto counts_64 = reinterpret_cast<uint64_t*>(scores);
#endif
Expand Down Expand Up @@ -264,7 +264,7 @@ const uint64_t ClassicSearch::s_expansion[] = {
281479271677952, 281479271677953, 281479271743488, 281479271743489
};

const uint16_t ClassicSearch::s_expansion_128[] = {
alignas(16) const uint16_t ClassicSearch::s_expansion_128[] = {
0, 0, 0, 0, 0, 0, 0, 0,
1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0,
Expand Down
4 changes: 2 additions & 2 deletions src/main_performance.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -289,7 +289,7 @@ const uint16_t expansion[] = {
1, 1, 1, 1, 1, 1, 1, 1
};

const auto expansion_128 = reinterpret_cast<const __m128i_u*>(expansion);
const auto expansion_128 = reinterpret_cast<const __m128i*>(expansion);

void compute_counts(size_t hashes_size, std::vector<uint16_t>& counts, const uint8_t* rows, size_t row_size) {
#pragma omp declare reduction (merge : std::vector<uint16_t> : \
Expand All @@ -298,7 +298,7 @@ void compute_counts(size_t hashes_size, std::vector<uint16_t>& counts, const uin
#pragma omp parallel reduction(merge: counts)
{
// auto counts_64 = reinterpret_cast<uint64_t*>(counts.data());
auto counts_128 = reinterpret_cast<__m128i_u*>(counts.data());
auto counts_128 = reinterpret_cast<__m128i*>(counts.data());
#pragma omp for
for (uint64_t i = 0; i < hashes_size; i += 1) {
auto rows_8 = rows + i * row_size;
Expand Down