// Convert a UTF-16 string in emulated guest memory to UTF-8.
//
// Params (all are guest-memory handles):
//   utf16     - source UTF-16 buffer
//   utf16_len - number of UTF-16 code units to convert (source length)
//   utf8      - destination UTF-8 buffer
//   utf8_len  - in: destination capacity in bytes; out: bytes required/written
// Returns ConversionOK, SRCIllegal (bad source/length pointers), or
// DSTExhausted (bad destination or capacity too small; utf8_len is then set
// to the required size so the caller can retry).
//
// NOTE(review): this file appears to contain a second definition of
// UTF16stoUTF8s — one of the two must be removed for this TU to link/compile.
int UTF16stoUTF8s(mem16_ptr_t utf16, mem64_t utf16_len, mem8_ptr_t utf8, mem64_t utf8_len)
{
	cellL10n.Warning("UTF16stoUTF8s(utf16_addr=0x%x, utf16_len_addr=0x%x, utf8_addr=0x%x, utf8_len_addr=0x%x)",
		utf16.GetAddr(), utf16_len.GetAddr(), utf8.GetAddr(), utf8_len.GetAddr());

	if (!utf16.IsGood() || !utf16_len.IsGood() || !utf8_len.IsGood())
		return SRCIllegal;

	// Read exactly utf16_len code units from guest memory. The previous code
	// constructed the string from a raw pointer (scanning for a NUL terminator
	// that may not exist — an unbounded read past the source buffer) and then
	// resize()d to utf16_len, padding with U+0000 instead of reading the data.
	std::u16string wstr((const char16_t*)Memory.VirtualToRealAddr(utf16), (size_t)utf16_len.GetValue());

#ifdef _MSC_VER
	// TODO(review): codecvt_utf8_utf16 is standard C++11 — this guard probably
	// predates toolchain support and could be lifted; confirm on other builds.
	std::wstring_convert<std::codecvt_utf8_utf16<char16_t>, char16_t> convert;
	std::string str = convert.to_bytes(wstr);

	if (!utf8.IsGood() || utf8_len.GetValue() < str.size())
	{
		// Report the required size so the caller can retry with a larger buffer.
		utf8_len = str.size();
		return DSTExhausted;
	}

	utf8_len = str.size();
	Memory.WriteString(utf8, str.c_str());
#endif
	return ConversionOK;
}
// Convert a UTF-16 string in emulated guest memory to UTF-8 (WinAPI path).
//
// Params (all are guest-memory handles):
//   utf16     - source UTF-16 buffer
//   utf16_len - maximum number of UTF-16 code units to convert
//   utf8      - destination UTF-8 buffer
//   utf8_len  - in: destination capacity in bytes; out: bytes required/written
// Returns ConversionOK, SRCIllegal, or DSTExhausted (utf8_len then holds the
// required size).
//
// NOTE(review): this file appears to contain a second definition of
// UTF16stoUTF8s — one of the two must be removed for this TU to link/compile.
int UTF16stoUTF8s(mem16_ptr_t utf16, mem64_t utf16_len, mem8_ptr_t utf8, mem64_t utf8_len)
{
	cellL10n.Warning("UTF16stoUTF8s(utf16_addr=0x%x, utf16_len_addr=0x%x, utf8_addr=0x%x, utf8_len_addr=0x%x)",
		utf16.GetAddr(), utf16_len.GetAddr(), utf8.GetAddr(), utf8_len.GetAddr());

	if (!utf16.IsGood() || !utf16_len.IsGood() || !utf8_len.IsGood())
		return SRCIllegal;

#ifdef WIN32
	// All WinAPI usage (WideCharToMultiByte, CP_UTF8) must live inside this
	// guard — the sizing call was previously outside it and broke non-Windows
	// builds. On Windows wchar_t is 16-bit, so the guest buffer maps directly.
	std::wstring wstr = (wchar_t*)Memory.VirtualToRealAddr(utf16);
	const int len = std::min((int)utf16_len.GetValue(), (int)wstr.size());

	// First pass: measure the required UTF-8 size in bytes.
	const int size = WideCharToMultiByte(CP_UTF8, 0, wstr.c_str(), len, 0, 0, NULL, NULL);

	if (!utf8.IsGood() || utf8_len.GetValue() < (size_t)size)
	{
		// Bad destination or insufficient capacity: report the required size
		// and bail (previously the !utf8.IsGood() case fell through and wrote
		// to the bad destination anyway).
		utf8_len = size;
		return DSTExhausted;
	}

	// Second pass: convert into a properly sized buffer. The previous code
	// wrote `size` bytes into &str[0] of an EMPTY std::string — heap overflow.
	std::string str((size_t)size, '\0');
	WideCharToMultiByte(CP_UTF8, 0, wstr.c_str(), len, &str[0], size, NULL, NULL);

	utf8_len = size;
	Memory.WriteString(utf8, str);
#else
	// TODO: non-Windows conversion (e.g. std::codecvt_utf8_utf16)
#endif
	return ConversionOK;
}