string_util: Remove MSVC workaround for converting between UTF8/UTF16

This has been fixed as of Visual Studio 2019 version 16.2.
Authored by Morph on 2021-02-08 02:55:43 -05:00; committed by FearlessTobi
parent 2ddecf35d2
commit 7806206e90


@@ -136,27 +136,13 @@ std::string ReplaceAll(std::string result, const std::string& src, const std::st
 }
 
 std::string UTF16ToUTF8(const std::u16string& input) {
-#ifdef _MSC_VER
-    // Workaround for missing char16_t/char32_t instantiations in MSVC2017
-    std::wstring_convert<std::codecvt_utf8_utf16<__int16>, __int16> convert;
-    std::basic_string<__int16> tmp_buffer(input.cbegin(), input.cend());
-    return convert.to_bytes(tmp_buffer);
-#else
     std::wstring_convert<std::codecvt_utf8_utf16<char16_t>, char16_t> convert;
     return convert.to_bytes(input);
-#endif
 }
 
 std::u16string UTF8ToUTF16(const std::string& input) {
-#ifdef _MSC_VER
-    // Workaround for missing char16_t/char32_t instantiations in MSVC2017
-    std::wstring_convert<std::codecvt_utf8_utf16<__int16>, __int16> convert;
-    auto tmp_buffer = convert.from_bytes(input);
-    return std::u16string(tmp_buffer.cbegin(), tmp_buffer.cend());
-#else
     std::wstring_convert<std::codecvt_utf8_utf16<char16_t>, char16_t> convert;
     return convert.from_bytes(input);
-#endif
 }
 
 #ifdef _WIN32
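
For reference, below is a minimal standalone sketch (not part of this commit) of the code path the change keeps: the char16_t specialization of std::wstring_convert / std::codecvt_utf8_utf16, which builds correctly on Visual Studio 2019 version 16.2 and later. The main() round-trip and the sample string are illustrative only; note that the <codecvt> facilities used here are deprecated as of C++17, so compilers may emit deprecation warnings.

// Round-trip UTF-8 <-> UTF-16 using the standard char16_t facet, the same
// approach the simplified UTF16ToUTF8/UTF8ToUTF16 in the diff rely on.
#include <codecvt>
#include <cstdio>
#include <locale>
#include <string>

static std::string UTF16ToUTF8(const std::u16string& input) {
    std::wstring_convert<std::codecvt_utf8_utf16<char16_t>, char16_t> convert;
    return convert.to_bytes(input);
}

static std::u16string UTF8ToUTF16(const std::string& input) {
    std::wstring_convert<std::codecvt_utf8_utf16<char16_t>, char16_t> convert;
    return convert.from_bytes(input);
}

int main() {
    // "café ✓" spelled out as UTF-8 bytes in a narrow string literal.
    const std::string utf8 = "caf\xC3\xA9 \xE2\x9C\x93";
    const std::u16string utf16 = UTF8ToUTF16(utf8);
    const std::string round_trip = UTF16ToUTF8(utf16);
    std::printf("round-trip matches: %s\n", round_trip == utf8 ? "yes" : "no");
    return 0;
}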