Fix Windows build (octoml#190)
filesystem path cannot be implicitly converted to string
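For context, a minimal sketch of the failure mode, assuming LoadBytesFromFile takes a narrow std::string (consistent with the .string() calls added in this commit): on Windows, std::filesystem::path::value_type is wchar_t, so the path's implicit conversion yields std::wstring rather than std::string, and passing a path where a std::string is expected fails to compile with MSVC even though it compiles on POSIX. Calling .string() makes the conversion explicit and portable.

#include <filesystem>
#include <iostream>
#include <string>

// Hypothetical stand-in for LoadBytesFromFile; assumed to take a narrow
// std::string, matching the .string() calls introduced by this commit.
std::string LoadBytesFromFile(const std::string& path) {
  return "<bytes of " + path + ">";
}

int main() {
  std::filesystem::path vocab_path = std::filesystem::path("model") / "vocab.json";
  // LoadBytesFromFile(vocab_path);  // compiles on POSIX (path::value_type is char),
  //                                 // but is a compile error with MSVC, where the
  //                                 // implicit conversion produces std::wstring.
  std::cout << LoadBytesFromFile(vocab_path.string()) << std::endl;  // portable
  return 0;
}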
tqchen authored May 19, 2023
1 parent f65df32 commit 6de9506
Showing 3 changed files with 8 additions and 7 deletions.
2 changes: 1 addition & 1 deletion 3rdparty/tokenizers-cpp
Submodule tokenizers-cpp updated 1 file
+1 −1 CMakeLists.txt
3 changes: 2 additions & 1 deletion cpp/cli_main.cc
@@ -302,7 +302,8 @@ ModelPaths ModelPaths::Find(const std::string& artifact_path, const std::string&
   std::cout << "Use MLC config: " << config_path << std::endl;
   // Step 2. Find parameters
   std::filesystem::path params_json;
-  if (auto path = FindFile({config_path.parent_path()}, {"ndarray-cache"}, {".json"})) {
+  if (auto path = FindFile(
+          {config_path.parent_path().string()}, {"ndarray-cache"}, {".json"})) {
     params_json = path.value();
   } else {
     std::cerr << "Cannot find \"ndarray-cache.json\" for params: " << config_path.parent_path()
10 changes: 5 additions & 5 deletions cpp/llm_chat.cc
@@ -427,11 +427,11 @@ std::unique_ptr<Tokenizer> TokenizerFromPath(const std::string& _path) {
   std::filesystem::path vocab_path = path / "vocab.json";
   std::filesystem::path added_tokens_path = path / "added_tokens.json";
   if (std::filesystem::exists(merges_path) && std::filesystem::exists(vocab_path)) {
-    std::string vocab = LoadBytesFromFile(vocab_path);
-    std::string merges = LoadBytesFromFile(merges_path);
+    std::string vocab = LoadBytesFromFile(vocab_path.string());
+    std::string merges = LoadBytesFromFile(merges_path.string());
     std::string added_tokens = "";
     if (std::filesystem::exists(added_tokens_path)) {
-      added_tokens = LoadBytesFromFile(added_tokens_path);
+      added_tokens = LoadBytesFromFile(added_tokens_path.string());
     }
     return Tokenizer::FromBlobByteLevelBPE(vocab, merges, added_tokens);
   }
@@ -441,10 +441,10 @@ std::unique_ptr<Tokenizer> TokenizerFromPath(const std::string& _path) {
     huggingface = path.parent_path() / "tokenizer.json";
   }
   if (std::filesystem::exists(sentencepiece)) {
-    return Tokenizer::FromBlobSentencePiece(LoadBytesFromFile(sentencepiece));
+    return Tokenizer::FromBlobSentencePiece(LoadBytesFromFile(sentencepiece.string()));
   }
   if (std::filesystem::exists(huggingface)) {
-    return Tokenizer::FromBlobJSON(LoadBytesFromFile(huggingface));
+    return Tokenizer::FromBlobJSON(LoadBytesFromFile(huggingface.string()));
   }
   LOG(FATAL) << "Cannot find any tokenizer under: " << _path;
 }
