如何将UTF-16中的字符串转换为C++中的UTF-8
How do I convert a string in UTF-16 to UTF-8 in C++
考虑:
STDMETHODIMP CFileSystemAPI::setRRConfig( BSTR config_str, VARIANT* ret )
{
mReportReaderFactory.reset( new sbis::report_reader::ReportReaderFactory() );
USES_CONVERSION;
std::string configuration_str = W2A( config_str );
但是在config_str中,我在UTF-16中获得了一个字符串。如何在此代码中将其转换为UTF-8?
您可以做这样的事情
// Converts a UTF-16 std::wstring to a UTF-8 encoded std::string using the
// Win32 WideCharToMultiByte API (Windows-only).
// Returns an empty string for empty input; throws std::runtime_error if the
// conversion fails.
std::string WstrToUtf8Str(const std::wstring& wstr)
{
    std::string retStr;
    if (!wstr.empty())
    {
        // First call is a size query (output buffer NULL/0); -1 means the
        // input is NUL-terminated, so the result includes the terminator.
        int sizeRequired = WideCharToMultiByte(CP_UTF8, 0, wstr.c_str(), -1, NULL, 0, NULL, NULL);
        if (sizeRequired > 0)
        {
            std::vector<char> utf8String(sizeRequired);
            int bytesConverted = WideCharToMultiByte(CP_UTF8, 0, wstr.c_str(), -1,
                                                     &utf8String[0],
                                                     static_cast<int>(utf8String.size()),
                                                     NULL, NULL);
            if (bytesConverted != 0)
            {
                // Buffer is NUL-terminated because we passed -1 above.
                retStr = &utf8String[0];
            }
            else
            {
                // BUG FIX: the original streamed wstr.c_str() (a wchar_t*) and
                // the wide literal L"'" into a narrow std::stringstream, which
                // selects the void* overload and prints pointer addresses, not
                // the text. Report the string length instead.
                std::stringstream err;
                err << __FUNCTION__
                    << " failed to convert a wstring of length "
                    << wstr.length();
                throw std::runtime_error( err.str() );
            }
        }
    }
    return retStr;
}
您可以将 BSTR 作为 std::wstring 传入该函数。
我实现了 UTF-8 &lt;-&gt; UTF-16 &lt;-&gt; UTF-32 之间转换的两个变体:第一个变体从零开始完整实现所有转换,第二个使用标准的 std::codecvt 和 std::wstring_convert(这两个类自 C++17 起被弃用,但仍然存在,并且保证存在于 C++11/C++14 中)。
如果您不喜欢我的代码,则可以使用几乎纯头文件的 C++ 库 utfcpp,它应该已经被许多用户充分测试过。
要将UTF-8转换为UTF-16,只需调用Utf32To16(Utf8To32(str))
,然后将UTF-16转换为UTF-8调用Utf32To8(Utf16To32(str))
。或者,您可以将我的方便的助手功能UtfConv<std::wstring>(std::string("abc"))
用于UTF-8到UTF-16或UTF-16的UtfConv<std::string>(std::wstring(L"abc"))
到UTF-8,UtfConv
实际上可以转换为任何UTF编码的字符串。请参阅Test(cs)
宏中的这些和其他用法的示例。
两个变体均符合 C++11。另外,它们可以在 Clang/GCC/MSVC 编译器中编译(请参见下面的"在线尝试!"链接),并经测试可在 Windows/Linux 操作系统中工作。
您必须使用UTF-8编码将我的两个代码片段保存在文件中,并将选项 -finput-charset=UTF-8 -fexec-charset=UTF-8
和clang/gcc的选项以及选项 /utf-8
交给MSVC。仅当您将具有非ASCII字符的文字字符串放置在我的代码中仅用于测试目的时,才需要此保存和选项。要使用函数本身,您不需要此UTF-8保存和选项。
<windows.h>
和<clocale>
和<iostream>
的夹杂物,还需要调用SetConsoleOutputCP(65001)
和std::setlocale(LC_ALL, "en_US.UTF-8")
,仅用于测试目的才能正确设置和输出到UTF-8控制台。这些转换功能不需要这些东西。
代码中有一部分并不是必需的,我指的是与 UtfHelper 相关的结构和函数——它们只是转换的辅助工具,主要是为了以跨平台的方式支持 std::wstring 而创建的,因为 wchar_t 在 Linux 上通常为 32 位,而在 Windows 上为 16 位。只有低级函数Utf8To32
,Utf32To8
,Utf16To32
,Utf32To16
是转换真正需要的唯一内容。
变体1是根据UTF-8和UTF-16编码的Wikipedia描述创建的。
如果您发现错误或任何改进(尤其是在变体中),请告诉我,我将修复它们。
变体1
在线尝试!
#include <string>
#include <iostream>
#include <stdexcept>
#include <type_traits>
#include <cstdint>
#ifdef _WIN32
#include <windows.h>
#else
#include <clocale>
#endif
// Throws std::runtime_error naming the failing condition, the source line, and a caller message.
#define ASSERT_MSG(cond, msg) { if (!(cond)) throw std::runtime_error("Assertion (" #cond ") failed at line " + std::to_string(__LINE__) + "! Msg: " + std::string(msg)); }
// Same as ASSERT_MSG with an empty message.
#define ASSERT(cond) ASSERT_MSG(cond, "")
// Encodes a UTF-32 string as UTF-8 into any string type with 1-byte characters
// (std::string by default). Uses the old-style extended UTF-8 scheme, so values
// above U+10FFFF are still encoded, with sequences of up to 7 bytes.
template <typename U8StrT = std::string>
inline static U8StrT Utf32To8(std::u32string const & s) {
    static_assert(sizeof(typename U8StrT::value_type) == 1, "Char byte-size should be 1 for UTF-8 strings!");
    typedef typename U8StrT::value_type VT;
    typedef uint8_t u8;
    U8StrT out;
    for (char32_t cp : s) {
        // ASCII fast path: a single byte, emitted verbatim.
        if (cp <= 0x7FU) {
            out.push_back(VT(u8(cp)));
            continue;
        }
        // Sequence length grows with the magnitude of the code point.
        size_t len;
        if      (cp <= 0x7FFU)      len = 2;
        else if (cp <= 0xFFFFU)     len = 3;
        else if (cp <= 0x1FFFFFU)   len = 4;
        else if (cp <= 0x3FFFFFFU)  len = 5;
        else if (cp <= 0x7FFFFFFFU) len = 6;
        else                        len = 7;
        // Lead byte: `len` high one-bits followed by the top payload bits.
        out.push_back(VT(u8((0xFFU << (8 - len)) | u8(cp >> (6 * (len - 1))))));
        // Continuation bytes: 10xxxxxx, six payload bits each.
        for (size_t k = 1; k < len; ++k)
            out.push_back(VT(u8(0x80U | ((cp >> (6 * (len - 1 - k))) & 0x3FU))));
    }
    return out;
}
// Decodes a UTF-8 encoded string (any string type with 1-byte characters) into
// UTF-32. Accepts the old-style extended UTF-8 forms of up to 7 bytes per
// sequence; throws std::runtime_error (via ASSERT) on a malformed lead or
// continuation byte, or on a sequence truncated at the end of the input.
// NOTE(review): overlong encodings and surrogate code points are not rejected.
template <typename U8StrT>
inline static std::u32string Utf8To32(U8StrT const & s) {
    static_assert(sizeof(typename U8StrT::value_type) == 1, "Char byte-size should be 1 for UTF-8 strings!");
    typedef uint8_t u8;
    std::u32string r;
    auto it = (u8 const *)s.c_str(), end = (u8 const *)(s.c_str() + s.length());
    while (it < end) {
        char32_t c = 0;
        if (*it <= 0x7FU) {
            // Single-byte (ASCII) sequence: the byte is the code point.
            c = *it;
            ++it;
        } else {
            // Lead byte of a multi-byte sequence must start with bits 11.
            ASSERT((*it & 0xC0U) == 0xC0U);
            size_t nby = 0;
            // Count the leading one-bits of the lead byte = bytes in this sequence.
            for (u8 b = *it; (b & 0x80U) != 0; b <<= 1, ++nby) {(void)0;}
            ASSERT(nby <= 7);
            ASSERT((end - it) >= nby);  // whole sequence must fit in the remaining input
            // Payload bits of the lead byte (mask off the length marker).
            c = *it & (u8(0xFFU) >> (nby + 1));
            for (size_t i = 1; i < nby; ++i) {
                ASSERT((it[i] & 0xC0U) == 0x80U);  // continuation bytes start with bits 10
                c = (c << 6) | (it[i] & 0x3FU);    // append six payload bits
            }
            it += nby;
        }
        r.push_back(c);
    }
    return r;
}
// Encodes UTF-32 as UTF-16 into any string type with 2-byte characters
// (std::u16string by default). Code points above U+FFFF are split into a
// surrogate pair; anything above U+10FFFF triggers ASSERT (std::runtime_error).
template <typename U16StrT = std::u16string>
inline static U16StrT Utf32To16(std::u32string const & s) {
    static_assert(sizeof(typename U16StrT::value_type) == 2, "Char byte-size should be 2 for UTF-16 strings!");
    typedef typename U16StrT::value_type VT;
    typedef uint16_t u16;
    U16StrT out;
    for (char32_t cp : s) {
        if (cp > 0xFFFFU) {
            // Supplementary-plane code point: rebase and emit high + low surrogate.
            ASSERT(cp <= 0x10FFFFU);
            char32_t v = cp - 0x10000U;
            out.push_back(VT(u16(0xD800U | ((v >> 10) & 0x3FFU))));
            out.push_back(VT(u16(0xDC00U | (v & 0x3FFU))));
        } else {
            // BMP code point: stored as a single 16-bit unit.
            out.push_back(VT(cp));
        }
    }
    return out;
}
// Decodes a UTF-16 string (any string type with 2-byte characters) into UTF-32.
// Throws std::runtime_error (via ASSERT/ASSERT_MSG) on an unpaired or malformed
// surrogate, or on a high surrogate truncated at the end of the input.
template <typename U16StrT>
inline static std::u32string Utf16To32(U16StrT const & s) {
    static_assert(sizeof(typename U16StrT::value_type) == 2, "Char byte-size should be 2 for UTF-16 strings!");
    typedef uint16_t u16;
    std::u32string r;
    auto it = (u16 const *)s.c_str(), end = (u16 const *)(s.c_str() + s.length());
    while (it < end) {
        char32_t c = 0;
        if (*it < 0xD800U || *it > 0xDFFFU) {
            // Basic Multilingual Plane unit: the code point is the unit itself.
            c = *it;
            ++it;
        } else if (*it >= 0xDC00U) {
            // A low surrogate may not start a sequence.
            ASSERT_MSG(false, "Unallowed UTF-16 sequence!");
        } else {
            // High surrogate: a low surrogate must follow.
            ASSERT(end - it >= 2);
            c = (*it & 0x3FFU) << 10;  // top 10 bits of the supplementary code point
            if ((it[1] < 0xDC00U) || (it[1] > 0xDFFFU)) {
                ASSERT_MSG(false, "Unallowed UTF-16 sequence!");
            } else {
                c |= it[1] & 0x3FFU;   // low 10 bits
                c += 0x10000U;         // rebase out of the BMP
            }
            it += 2;
        }
        r.push_back(c);
    }
    return r;
}
// Dispatches UTF conversions on the character byte-size of StrT:
// 1 byte -> UTF-8, 2 bytes -> UTF-16, 4 bytes -> UTF-32 (pass-through copy).
// This lets std::string / std::u16string / std::u32string / std::wstring all
// go through one UtfTo32/UtfFrom32 interface, cross-platform (wchar_t is
// 2 bytes on Windows and typically 4 bytes on Linux).
template <typename StrT, size_t NumBytes = sizeof(typename StrT::value_type)> struct UtfHelper;
// 1-byte characters: treat the string as UTF-8.
template <typename StrT> struct UtfHelper<StrT, 1> {
    inline static std::u32string UtfTo32(StrT const & s) { return Utf8To32(s); }
    inline static StrT UtfFrom32(std::u32string const & s) { return Utf32To8<StrT>(s); }
};
// 2-byte characters: treat the string as UTF-16.
template <typename StrT> struct UtfHelper<StrT, 2> {
    inline static std::u32string UtfTo32(StrT const & s) { return Utf16To32(s); }
    inline static StrT UtfFrom32(std::u32string const & s) { return Utf32To16<StrT>(s); }
};
// 4-byte characters: already UTF-32 — just re-type the code units.
template <typename StrT> struct UtfHelper<StrT, 4> {
    inline static std::u32string UtfTo32(StrT const & s) {
        return std::u32string((char32_t const *)(s.c_str()), (char32_t const *)(s.c_str() + s.length()));
    }
    inline static StrT UtfFrom32(std::u32string const & s) {
        return StrT((typename StrT::value_type const *)(s.c_str()),
            (typename StrT::value_type const *)(s.c_str() + s.length()));
    }
};
// Converts any UTF-encoded string to UTF-32, deducing the source encoding
// from the character byte-size of StrT.
template <typename StrT> inline static std::u32string UtfTo32(StrT const & s) {
    return UtfHelper<StrT>::UtfTo32(s);
}
// Converts UTF-32 to any UTF-encoded string type StrT.
template <typename StrT> inline static StrT UtfFrom32(std::u32string const & s) {
    return UtfHelper<StrT>::UtfFrom32(s);
}
// Converts between any two UTF-encoded string types, going through UTF-32.
template <typename StrToT, typename StrFromT> inline static StrToT UtfConv(StrFromT const & s) {
    return UtfFrom32<StrToT>(UtfTo32(s));
}
// Exercises all conversion round-trips on the literal `cs` and prints the
// results plus the UTF-8/UTF-16 byte counts. `cs` must be a plain narrow
// string literal: the u/U/L prefixes are attached via token pasting (u##cs).
// FIX: the line-continuation backslashes were lost in the original paste,
// leaving the macro empty and the statements dangling outside any function;
// restored here so the macro actually expands to all of the lines below.
#define Test(cs) \
    std::cout << Utf32To8(Utf8To32(std::string(cs))) << ", "; \
    std::cout << Utf32To8(Utf16To32(Utf32To16(Utf8To32(std::string(cs))))) << ", "; \
    std::cout << Utf32To8(Utf16To32(std::u16string(u##cs))) << ", "; \
    std::cout << Utf32To8(std::u32string(U##cs)) << ", "; \
    std::cout << UtfConv<std::string>(UtfConv<std::u16string>(UtfConv<std::u32string>(UtfConv<std::u32string>(UtfConv<std::u16string>(std::string(cs)))))) << ", "; \
    std::cout << UtfConv<std::string>(UtfConv<std::wstring>(UtfConv<std::string>(UtfConv<std::u32string>(UtfConv<std::u32string>(std::string(cs)))))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::string(cs))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::u16string(u##cs))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::wstring(L##cs))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::u32string(U##cs))) << std::endl; \
    std::cout << "UTF-8 num bytes: " << std::dec << Utf32To8(std::u32string(U##cs)).size() << ", "; \
    std::cout << "UTF-16 num bytes: " << std::dec << (Utf32To16(std::u32string(U##cs)).size() * 2) << std::endl;
// Demo entry point: switches console/locale output to UTF-8 (needed only so
// the test strings display correctly) and runs the Test macro over several
// literals. Returns 0 on success, -1 if any conversion assertion throws.
int main() {
#ifdef _WIN32
    SetConsoleOutputCP(65001);  // UTF-8 code page for the Windows console
#else
    std::setlocale(LC_ALL, "en_US.UTF-8");
#endif
    try {
        Test("World");
        Test("Привет");
        Test(" ");  // NOTE(review): non-ASCII literal appears garbled in this paste — originally emoji/CJK samples
        Test(" ");
        return 0;
    } catch (std::exception const & ex) {
        std::cout << "Exception: " << ex.what() << std::endl;
        return -1;
    }
}
输出:
World, World, World, World, World, World, World, World, World, World
UTF-8 num bytes: 5, UTF-16 num bytes: 10
Привет, Привет, Привет, Привет, Привет, Привет, Привет, Привет, Привет, Привет
UTF-8 num bytes: 12, UTF-16 num bytes: 12
, , , , , , , , ,
UTF-8 num bytes: 8, UTF-16 num bytes: 8
, , , , , , , , ,
UTF-8 num bytes: 4, UTF-16 num bytes: 4
变体2
在线尝试!
#include <string>
#include <iostream>
#include <stdexcept>
#include <type_traits>
#include <locale>
#include <codecvt>
#include <cstdint>
#ifdef _WIN32
#include <windows.h>
#else
#include <clocale>
#endif
// Throws std::runtime_error naming the failing condition and source line.
#define ASSERT(cond) { if (!(cond)) throw std::runtime_error("Assertion (" #cond ") failed at line " + std::to_string(__LINE__) + "!"); }
// Workaround for some of MSVC compilers.
// NOTE(review): presumably the static (non-DLL) runtimes of VS2015..VS2017
// miss these codecvt facet id definitions, so they must be supplied once in
// the program — confirm against the toolchain in use.
#if defined(_MSC_VER) && (!_DLL) && (_MSC_VER >= 1900 /* VS 2015*/) && (_MSC_VER <= 1914 /* VS 2017 */)
std::locale::id std::codecvt<char16_t, char, _Mbstatet>::id;
std::locale::id std::codecvt<char32_t, char, _Mbstatet>::id;
#endif
// Decodes a UTF-8 encoded string (any string type with 1-byte characters) into
// UTF-32 using the standard codecvt facet (deprecated since C++17 but still
// available). Throws std::range_error on invalid input (wstring_convert default).
template <typename U8StrT>
inline static std::u32string Utf8To32(U8StrT const & s) {
    static_assert(sizeof(typename U8StrT::value_type) == 1, "Char byte-size should be 1 for UTF-8 strings!");
    std::wstring_convert<std::codecvt_utf8<char32_t>, char32_t> conv;
    // View the input as a raw byte range for the facet.
    char const * first = (char const *)s.c_str();
    char const * last = first + s.length();
    return conv.from_bytes(first, last);
}
// Encodes a UTF-32 string as UTF-8 into any string type with 1-byte characters
// (std::string by default), using the standard codecvt facet (deprecated since
// C++17 but still available).
template <typename U8StrT = std::string>
inline static U8StrT Utf32To8(std::u32string const & s) {
    static_assert(sizeof(typename U8StrT::value_type) == 1, "Char byte-size should be 1 for UTF-8 strings!");
    typedef typename U8StrT::value_type VT;
    std::wstring_convert<std::codecvt_utf8<char32_t>, char32_t> conv;
    std::string bytes = conv.to_bytes(s.c_str(), s.c_str() + s.length());
    // Re-type the byte buffer into the requested 1-byte string type.
    return U8StrT((VT const *)bytes.c_str(),
                  (VT const *)(bytes.c_str() + bytes.length()));
}
// Decodes a UTF-16 string (any string type with 2-byte characters) into UTF-32
// using the standard codecvt facet (deprecated since C++17 but still available).
// The facet consumes raw bytes, interpreted here as little-endian UTF-16.
template <typename U16StrT>
inline static std::u32string Utf16To32(U16StrT const & s) {
    static_assert(sizeof(typename U16StrT::value_type) == 2, "Char byte-size should be 2 for UTF-16 strings!");
    std::wstring_convert<std::codecvt_utf16<char32_t, 0x10ffff, std::little_endian>, char32_t> conv;
    // View the 2-byte code units as a byte range for the facet.
    char const * first = (char const *)s.c_str();
    char const * last = first + s.length() * sizeof(typename U16StrT::value_type);
    return conv.from_bytes(first, last);
}
// Encodes UTF-32 as UTF-16 into any string type with 2-byte characters
// (std::u16string by default), using the standard codecvt facet (deprecated
// since C++17 but still available). The facet emits little-endian bytes.
template <typename U16StrT = std::u16string>
inline static U16StrT Utf32To16(std::u32string const & s) {
    static_assert(sizeof(typename U16StrT::value_type) == 2, "Char byte-size should be 2 for UTF-16 strings!");
    typedef typename U16StrT::value_type VT;
    std::wstring_convert<std::codecvt_utf16<char32_t, 0x10ffff, std::little_endian>, char32_t> conv;
    std::string bytes = conv.to_bytes(s.c_str(), s.c_str() + s.length());
    // Reinterpret pairs of output bytes as 2-byte code units.
    return U16StrT((VT const *)bytes.c_str(),
                   (VT const *)(bytes.c_str() + bytes.length()));
}
// Dispatches UTF conversions on the character byte-size of StrT:
// 1 byte -> UTF-8, 2 bytes -> UTF-16, 4 bytes -> UTF-32 (pass-through copy).
// This lets std::string / std::u16string / std::u32string / std::wstring all
// go through one UtfTo32/UtfFrom32 interface, cross-platform (wchar_t is
// 2 bytes on Windows and typically 4 bytes on Linux).
template <typename StrT, size_t NumBytes = sizeof(typename StrT::value_type)> struct UtfHelper;
// 1-byte characters: treat the string as UTF-8.
template <typename StrT> struct UtfHelper<StrT, 1> {
    inline static std::u32string UtfTo32(StrT const & s) { return Utf8To32(s); }
    inline static StrT UtfFrom32(std::u32string const & s) { return Utf32To8<StrT>(s); }
};
// 2-byte characters: treat the string as UTF-16.
template <typename StrT> struct UtfHelper<StrT, 2> {
    inline static std::u32string UtfTo32(StrT const & s) { return Utf16To32(s); }
    inline static StrT UtfFrom32(std::u32string const & s) { return Utf32To16<StrT>(s); }
};
// 4-byte characters: already UTF-32 — just re-type the code units.
template <typename StrT> struct UtfHelper<StrT, 4> {
    inline static std::u32string UtfTo32(StrT const & s) {
        return std::u32string((char32_t const *)(s.c_str()), (char32_t const *)(s.c_str() + s.length()));
    }
    inline static StrT UtfFrom32(std::u32string const & s) {
        return StrT((typename StrT::value_type const *)(s.c_str()),
            (typename StrT::value_type const *)(s.c_str() + s.length()));
    }
};
// Converts any UTF-encoded string to UTF-32, deducing the source encoding
// from the character byte-size of StrT.
template <typename StrT> inline static std::u32string UtfTo32(StrT const & s) {
    return UtfHelper<StrT>::UtfTo32(s);
}
// Converts UTF-32 to any UTF-encoded string type StrT.
template <typename StrT> inline static StrT UtfFrom32(std::u32string const & s) {
    return UtfHelper<StrT>::UtfFrom32(s);
}
// Converts between any two UTF-encoded string types, going through UTF-32.
template <typename StrToT, typename StrFromT> inline static StrToT UtfConv(StrFromT const & s) {
    return UtfFrom32<StrToT>(UtfTo32(s));
}
// Exercises all conversion round-trips on the literal `cs` and prints the
// results plus the UTF-8/UTF-16 byte counts. `cs` must be a plain narrow
// string literal: the u/U/L prefixes are attached via token pasting (u##cs).
// FIX: the line-continuation backslashes were lost in the original paste,
// leaving the macro empty and the statements dangling outside any function;
// restored here so the macro actually expands to all of the lines below.
#define Test(cs) \
    std::cout << Utf32To8(Utf8To32(std::string(cs))) << ", "; \
    std::cout << Utf32To8(Utf16To32(Utf32To16(Utf8To32(std::string(cs))))) << ", "; \
    std::cout << Utf32To8(Utf16To32(std::u16string(u##cs))) << ", "; \
    std::cout << Utf32To8(std::u32string(U##cs)) << ", "; \
    std::cout << UtfConv<std::string>(UtfConv<std::u16string>(UtfConv<std::u32string>(UtfConv<std::u32string>(UtfConv<std::u16string>(std::string(cs)))))) << ", "; \
    std::cout << UtfConv<std::string>(UtfConv<std::wstring>(UtfConv<std::string>(UtfConv<std::u32string>(UtfConv<std::u32string>(std::string(cs)))))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::string(cs))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::u16string(u##cs))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::wstring(L##cs))) << ", "; \
    std::cout << UtfFrom32<std::string>(UtfTo32(std::u32string(U##cs))) << std::endl; \
    std::cout << "UTF-8 num bytes: " << std::dec << Utf32To8(std::u32string(U##cs)).size() << ", "; \
    std::cout << "UTF-16 num bytes: " << std::dec << (Utf32To16(std::u32string(U##cs)).size() * 2) << std::endl;
// Demo entry point: switches console/locale output to UTF-8 (needed only so
// the test strings display correctly) and runs the Test macro over several
// literals. Returns 0 on success, -1 if any conversion throws.
int main() {
#ifdef _WIN32
    SetConsoleOutputCP(65001);  // UTF-8 code page for the Windows console
#else
    std::setlocale(LC_ALL, "en_US.UTF-8");
#endif
    try {
        Test("World");
        Test("Привет");
        Test(" ");  // NOTE(review): non-ASCII literal appears garbled in this paste — originally emoji/CJK samples
        Test(" ");
        return 0;
    } catch (std::exception const & ex) {
        std::cout << "Exception: " << ex.what() << std::endl;
        return -1;
    }
}
输出:
World, World, World, World, World, World, World, World, World, World
UTF-8 num bytes: 5, UTF-16 num bytes: 10
Привет, Привет, Привет, Привет, Привет, Привет, Привет, Привет, Привет, Привет
UTF-8 num bytes: 12, UTF-16 num bytes: 12
, , , , , , , , ,
UTF-8 num bytes: 8, UTF-16 num bytes: 8
, , , , , , , , ,
UTF-8 num bytes: 4, UTF-16 num bytes: 4
如果您使用的是 C++11,则可以看一下:
http://www.cplusplus.com/reference/codecvt/codecvt_utf8_utf16/
/*
 * Appends the UTF-8 encoding of `ucs_character` to `buffer` at `*offset`,
 * advancing `*offset` by the number of bytes written (1..6). Uses the
 * original (pre-RFC 3629) UTF-8 scheme, so values up to 0x7FFFFFFF are
 * encoded; anything larger is silently skipped. The caller must ensure
 * the buffer has room for up to 6 more bytes.
 */
void encode_unicode_character(char* buffer, int* offset, wchar_t ucs_character)
{
    int length;

    if (ucs_character <= 0x7F)
    {
        /* Plain single-byte ASCII — no continuation bytes. */
        buffer[(*offset)++] = (char) ucs_character;
        return;
    }
    /* Sequence length grows with the magnitude of the code point. */
    else if (ucs_character <= 0x7FF)      length = 2;
    else if (ucs_character <= 0xFFFF)     length = 3;
    else if (ucs_character <= 0x1FFFFF)   length = 4;
    else if (ucs_character <= 0x3FFFFFF)  length = 5;
    else if (ucs_character <= 0x7FFFFFFF) length = 6;
    else
    {
        /* Invalid char; don't encode anything. */
        return;
    }

    {
        /* Lead-byte markers indexed by sequence length. */
        static const unsigned char lead[7] = { 0, 0, 0xC0, 0xE0, 0xF0, 0xF8, 0xFC };
        int shift = 6 * (length - 1);

        /* Lead byte carries the marker plus the top payload bits. */
        buffer[(*offset)++] = (char)(lead[length] | (ucs_character >> shift));
        /* Continuation bytes: 10xxxxxx, six payload bits each. */
        for (shift -= 6; shift >= 0; shift -= 6)
            buffer[(*offset)++] = (char)(0x80 | ((ucs_character >> shift) & 0x3F));
    }
}
ISO10646-2012您需要了解UCS。
- 如何使用OpenCV将RBG图像转换为HSV,并将H、S和V值保存为C++中的3个独立图像
- 将"打开的CV图像"中的"颜色"转换为整数格式
- 了解 GLM- openGL 中的相机转换
- C++中的双指针类型转换
- 了解双精度转换与整数转换中的整数与截断关系
- C++要包含在词法强制转换中的头文件
- 是否可以使用 SAPI 忽略Microsoft文本到语音转换中的单词
- 处理字符到QString转换中的ASCII字符
- 反射格雷码到二进制转换中的幻数
- 自定义文本到语音转换中的语音
- C++ 为转换中的多数组错误增加值
- Java C/C++ 转换中的指针
- 标准::转换中的 [] const_iterator::value_type 是什么意思
- HSV 转换中的浮点错误
- 动态转换中的错误
- c++int存储为char*转换中的字符,无需任何API
- 引用类型转换中的c++虚函数
- 不同的编译器用来处理数字转换中的溢出的常见策略是什么?
- 隐式常量转换中的溢出[-Woverflow]
- 从c++到c# . net紧凑框架结构转换中的问题