Fix Format String Signedness
[ Merge of http://go/wvgerrit/110165 ]

The conditional compilation for flagging the CDM and ODKiTEE logging
functions as printf-like was guarded by a check on "__gnuc__" or
"__clang__". However, GCC doesn't actually define "__gnuc__"; it defines
"__GNUC__", all caps. Fixing this causes GCC to find a slew of
format-string errors that Clang was accepting.

This patch fixes the capitalization and the uncovered errors, most of
which fall into one of a few categories:

1) The format string and variable had different signedness. For these,
   the format strings are updated to match the variables.

2) The variable was an array index that was not of type size_t. For
   these, the variables have been updated to be size_t and the format
   strings have been updated to use %zu. A few index variables that
   weren't actually used in format strings are also fixed to be size_t.

3) The code assumed the signedness of the internal representation of an
   integer literal, enum constant, or enum variable. For these, I either
   cast the input to a known type so that the format string is valid
   regardless of internal representation, or I switched to a hexadecimal
   format string. The latter case is more useful on ODKiTEE enums where
   the literal value is in the code as a 32-bit hexadecimal value anyway.

This patch also adds missing integer casts to the enum literals in the
ODKiTEE logging header. (These are required for pedantic C99
compatibility when using literal values that do not fit into a 16-bit
integer.)

Bug: 173460694
Test: jenkins/odkitee_ta
Test: build.py x86-64
Change-Id: I244972639a5a6ea0de157eb67e1e0dfa9787ec32
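For readers outside this codebase, here is a minimal, self-contained sketch of the patterns involved. It is not the actual CDM/ODKiTEE logging API; Log, BufferType, and the parameter layout are illustrative only. It shows the GCC/Clang printf-format attribute that the capitalization fix re-enables, plus the static_cast and size_t/%zu patterns used to resolve the uncovered warnings:

// Sketch only: a printf-like logger plus the two main fix patterns.
#include <cstdarg>
#include <cstddef>
#include <cstdio>

// Stand-in for the project's Log(); parameter 1 is the format string
// and checking of the variadic arguments starts at parameter 2.
#if (defined(__GNUC__) || defined(__clang__)) && defined(__LP64__)
[[gnu::format(printf, 1, 2)]]
#endif
void Log(const char* format, ...) {
  va_list args;
  va_start(args, format);
  vfprintf(stderr, format, args);
  va_end(args);
}

enum class BufferType : unsigned { kClear = 0x1000u };  // illustrative

int main() {
  // Category 1/3: the enum's internal representation is not necessarily
  // what %u expects, so cast to a known type before formatting.
  BufferType type = BufferType::kClear;
  Log("Unrecognized buffer type %u\n", static_cast<unsigned int>(type));

  // Category 2: array indexes are size_t and printed with %zu.
  const char bytes[] = "abcd";
  for (size_t i = 0; i < sizeof(bytes) - 1; ++i) {
    Log("byte %zu = %c\n", i, bytes[i]);
  }
  return 0;
}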
@@ -127,7 +127,7 @@ void AdvanceDestBuffer(OEMCrypto_DestBufferDesc* dest_buffer, size_t bytes) {
       return;
   }
   LOGE("Unrecognized OEMCryptoBufferType %u - doing nothing",
-       dest_buffer->type);
+       static_cast<unsigned int>(dest_buffer->type));
 }

 bool GetGenericSigningAlgorithm(CdmSigningAlgorithm algorithm,
@@ -700,8 +700,9 @@ void InitializationData::DumpToLogs() const {
   }
   if (pssh.has_protection_scheme()) {
     uint32_t scheme = pssh.protection_scheme();
-    LOGD("InitData: Protection Scheme: %c%c%c%c", (scheme >> 24) & 0xFF,
-         (scheme >> 16) & 0xFF, (scheme >> 8) & 0xFF, (scheme >> 0) & 0xFF);
+    LOGD("InitData: Protection Scheme: %c%c%c%c",
+         static_cast<char>(scheme >> 24), static_cast<char>(scheme >> 16),
+         static_cast<char>(scheme >> 8), static_cast<char>(scheme >> 0));
   }
   switch (pssh.type()) {
     case video_widevine::WidevinePsshData_Type_SINGLE:
@@ -35,7 +35,7 @@ CORE_UTIL_EXPORT void InitLogging();

 // Only enable format specifier warnings on LP64 systems. There is
 // no easy portable method to handle format specifiers for int64_t.
-#if (defined(__gnuc__) || defined(__clang__)) && defined(__LP64__)
+#if (defined(__GNUC__) || defined(__clang__)) && defined(__LP64__)
 [[gnu::format(printf, 5, 6)]] CORE_UTIL_EXPORT void Log(const char* file,
                                                         const char* function,
                                                         int line,
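For reference, the two integers in [[gnu::format(printf, 5, 6)]] are 1-based parameter positions: the first names the format-string parameter and the second names the first variadic argument checked against it. A rough sketch of how that lines up with a declaration like the one above follows; the fourth parameter and its name are hypothetical, since the hunk does not show the full signature.

#if (defined(__GNUC__) || defined(__clang__)) && defined(__LP64__)
// Parameter 5 is the format string; argument checking starts at 6.
[[gnu::format(printf, 5, 6)]]
#endif
void Log(const char* file,      // parameter 1
         const char* function,  // parameter 2
         int line,              // parameter 3
         int severity,          // parameter 4 (hypothetical)
         const char* format,    // parameter 5: checked format string
         ...);                  // parameters 6+: checked arguments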
@@ -56,12 +56,12 @@ std::vector<uint8_t> a2b_hex(const std::string& byte) {
     return array;
   }

-  for (unsigned int i = 0; i < count / 2; ++i) {
+  for (size_t i = 0; i < count / 2; ++i) {
     unsigned char msb = 0;  // most significant 4 bits
     unsigned char lsb = 0;  // least significant 4 bits
     if (!DecodeHexChar(byte[i * 2], &msb) ||
         !DecodeHexChar(byte[i * 2 + 1], &lsb)) {
-      LOGE("Invalid hex value %c%c at index %d", byte[i * 2], byte[i * 2 + 1],
+      LOGE("Invalid hex value %c%c at index %zu", byte[i * 2], byte[i * 2 + 1],
            i);
       return array;
     }