Fix -Wshorten-64-to-32 errors in string conversion code

(This is a merge of http://go/wvgerrit/134313.)

This patch fixes code that would trigger -Wshorten-64-to-32 by
implicitly narrowing a variable from 64 to 32 bits. There were two cases
of this in string_conversions.cpp:

* The implicit conversions in DecodeBase64Char() were made explicit,
  which required going through both a reinterpret_cast and a static_cast
  to express fully.

* a2b_hex() now uses size_t for the length, matching the return type of
  std::string::size(), as it always should have.

Bug: 194971260
Test: x86-64 tests
Change-Id: Ib9715b8adecc104f1a056ab0ac5faa5be74e5e54
This commit is contained in:
John W. Bruce
2021-10-20 13:56:49 -07:00
committed by John Bruce
parent 1447eba7bc
commit 921cc2c4a3

View File

@@ -33,9 +33,11 @@ const char kBase64SafeCodes[] =
// Decodes a single Base64 encoded character into its 6-bit value.
// The provided |codes| must be a Base64 character map.
// Decodes a single Base64-encoded character into its 6-bit value.
// |codes| must be a NUL-terminated 64-character Base64 alphabet.
// Returns the character's index in |codes| (0..63), or -1 if |c| is not
// part of the alphabet.
int DecodeBase64Char(char c, const char* codes) {
  // strchr() matches the terminating NUL as well; without this guard a
  // '\0' input would "decode" to the alphabet length instead of failing.
  if (c == '\0') return -1;
  const char* c_in_codes = strchr(codes, c);
  if (c_in_codes == nullptr) return -1;
  // The pointer difference is a ptrdiff_t (64-bit on LP64); the cast makes
  // the narrowing explicit so -Wshorten-64-to-32 stays quiet. The index is
  // at most 63, so the conversion is always value-preserving.
  return static_cast<int>(c_in_codes - codes);
}
bool DecodeHexChar(char ch, uint8_t* digit) {
@@ -158,9 +160,9 @@ std::vector<uint8_t> Base64DecodeInternal(const char* encoded, size_t length,
// Converts an ASCII hex string (2 hex characters per byte) into a byte vector.
std::vector<uint8_t> a2b_hex(const std::string& byte) {
std::vector<uint8_t> array;
unsigned int count = byte.size();
size_t count = byte.size();
if (count == 0 || (count % 2) != 0) {
LOGE("Invalid input size %u for string %s", count, byte.c_str());
LOGE("Invalid input size %zu for string %s", count, byte.c_str());
return array;
}