Diffstat (limited to 'Userland/Libraries')
35 files changed, 162 insertions, 162 deletions
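The diff below renames per-subsystem debug flags from `debug_foo`-style names to `FOO_DEBUG`-style macro names (for example `debug_gzip` to `GZIP_DEBUG`) at their `dbgln<...>` and `if constexpr (...)` call sites. As a rough sketch of the underlying pattern (using a simplified stand-in for `dbgln`, not Serenity's actual AK implementation), a compile-time boolean lets this debug output compile away entirely when a flag is off:

// Hedged sketch only: a simplified stand-in for the dbgln<FLAG>() pattern
// seen throughout this diff. The real AK/Format.h and the generated Debug.h
// differ; printf-style formatting is used here just to stay self-contained.
#include <cstdio>

// In the real tree, flags such as GZIP_DEBUG come from a generated Debug.h
// and are defined to 0 or 1; hard-coded here for illustration.
#ifndef GZIP_DEBUG
#    define GZIP_DEBUG 0
#endif

// The bool template parameter gates the output at compile time, so a call
// like dbgln<GZIP_DEBUG>(...) produces no output (and no work) when the flag is 0.
template<bool enabled = true, typename... Args>
void dbgln(char const* fmt, Args... args)
{
    if constexpr (enabled) {
        std::fprintf(stderr, fmt, args...);
        std::fputc('\n', stderr);
    }
}

int main()
{
    // Gated via the template parameter, as in dbgln<GZIP_DEBUG>(...) in the diff.
    dbgln<GZIP_DEBUG>("get_gzip_payload: returning slice of size %zu", static_cast<size_t>(42));

    // Or gated explicitly, as in the `if constexpr (GZIP_DEBUG)` blocks,
    // which suits multi-line debug dumps.
    if constexpr (GZIP_DEBUG) {
        dbgln("destination_len = %d", 123);
    }
    return 0;
}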
diff --git a/Userland/Libraries/LibCore/Gzip.cpp b/Userland/Libraries/LibCore/Gzip.cpp index 46ea61da20..87002b2b47 100644 --- a/Userland/Libraries/LibCore/Gzip.cpp +++ b/Userland/Libraries/LibCore/Gzip.cpp @@ -102,7 +102,7 @@ static Optional<ByteBuffer> get_gzip_payload(const ByteBuffer& data) } auto new_size = data.size() - current; - dbgln<debug_gzip>("get_gzip_payload: Returning slice from {} with size {}", current, new_size); + dbgln<GZIP_DEBUG>("get_gzip_payload: Returning slice from {} with size {}", current, new_size); return data.slice(current, new_size); } @@ -110,7 +110,7 @@ Optional<ByteBuffer> Gzip::decompress(const ByteBuffer& data) { ASSERT(is_compressed(data)); - dbgln<debug_gzip>("Gzip::decompress: Decompressing gzip compressed data. size={}", data.size()); + dbgln<GZIP_DEBUG>("Gzip::decompress: Decompressing gzip compressed data. size={}", data.size()); auto optional_payload = get_gzip_payload(data); if (!optional_payload.has_value()) { return Optional<ByteBuffer>(); @@ -122,7 +122,7 @@ Optional<ByteBuffer> Gzip::decompress(const ByteBuffer& data) while (true) { unsigned long destination_len = destination.size(); - if constexpr (debug_gzip) { + if constexpr (GZIP_DEBUG) { dbgln("Gzip::decompress: Calling puff()"); dbgln(" destination_data = {}", destination.data()); dbgln(" destination_len = {}", destination_len); diff --git a/Userland/Libraries/LibCore/NetworkJob.cpp b/Userland/Libraries/LibCore/NetworkJob.cpp index 8a9827d94d..c3a742ed4b 100644 --- a/Userland/Libraries/LibCore/NetworkJob.cpp +++ b/Userland/Libraries/LibCore/NetworkJob.cpp @@ -55,7 +55,7 @@ void NetworkJob::did_finish(NonnullRefPtr<NetworkResponse>&& response) NonnullRefPtr<NetworkJob> protector(*this); m_response = move(response); - dbgln<debug_cnetworkjob>("{} job did_finish", *this); + dbgln<CNETWORKJOB_DEBUG>("{} job did_finish", *this); ASSERT(on_finish); on_finish(true); shutdown(); diff --git a/Userland/Libraries/LibCore/Socket.cpp b/Userland/Libraries/LibCore/Socket.cpp index 3adbed2d4f..dbe0476b81 100644 --- a/Userland/Libraries/LibCore/Socket.cpp +++ b/Userland/Libraries/LibCore/Socket.cpp @@ -79,7 +79,7 @@ bool Socket::connect(const String& hostname, int port) } IPv4Address host_address((const u8*)hostent->h_addr_list[0]); - dbgln<debug_csocket>("Socket::connect: Resolved '{}' to {}", hostname, host_address); + dbgln<CSOCKET_DEBUG>("Socket::connect: Resolved '{}' to {}", hostname, host_address); return connect(host_address, port); } @@ -98,7 +98,7 @@ bool Socket::connect(const SocketAddress& address, int port) { ASSERT(!is_connected()); ASSERT(address.type() == SocketAddress::Type::IPv4); - dbgln<debug_csocket>("{} connecting to {}...", *this, address); + dbgln<CSOCKET_DEBUG>("{} connecting to {}...", *this, address); ASSERT(port > 0 && port <= 65535); @@ -119,7 +119,7 @@ bool Socket::connect(const SocketAddress& address) { ASSERT(!is_connected()); ASSERT(address.type() == SocketAddress::Type::Local); - dbgln<debug_csocket>("{} connecting to {}...", *this, address); + dbgln<CSOCKET_DEBUG>("{} connecting to {}...", *this, address); sockaddr_un saddr; saddr.sun_family = AF_LOCAL; @@ -138,7 +138,7 @@ bool Socket::connect(const SocketAddress& address) bool Socket::common_connect(const struct sockaddr* addr, socklen_t addrlen) { auto connected = [this] { - dbgln<debug_csocket>("{} connected!", *this); + dbgln<CSOCKET_DEBUG>("{} connected!", *this); if (!m_connected) { m_connected = true; ensure_read_notifier(); @@ -153,7 +153,7 @@ bool Socket::common_connect(const struct sockaddr* addr, socklen_t 
addrlen) int rc = ::connect(fd(), addr, addrlen); if (rc < 0) { if (errno == EINPROGRESS) { - dbgln<debug_csocket>("{} connection in progress (EINPROGRESS)", *this); + dbgln<CSOCKET_DEBUG>("{} connection in progress (EINPROGRESS)", *this); m_notifier = Notifier::construct(fd(), Notifier::Event::Write, this); m_notifier->on_ready_to_write = move(connected); return true; @@ -163,7 +163,7 @@ bool Socket::common_connect(const struct sockaddr* addr, socklen_t addrlen) errno = saved_errno; return false; } - dbgln<debug_csocket>("{} connected ok!", *this); + dbgln<CSOCKET_DEBUG>("{} connected ok!", *this); connected(); return true; } diff --git a/Userland/Libraries/LibCore/SyscallUtils.h b/Userland/Libraries/LibCore/SyscallUtils.h index 5815fb67cd..de14ffed23 100644 --- a/Userland/Libraries/LibCore/SyscallUtils.h +++ b/Userland/Libraries/LibCore/SyscallUtils.h @@ -42,9 +42,9 @@ inline int safe_syscall(Syscall syscall, Args&&... args) for (;;) { int sysret = syscall(forward<Args>(args)...); if (sysret == -1) { - if constexpr (debug_safe_syscall) { + if constexpr (SAFE_SYSCALL_DEBUG) { int saved_errno = errno; - dbgln<debug_safe_syscall>("Core::safe_syscall: {} ({}: {})", sysret, saved_errno, strerror(saved_errno)); + dbgln<SAFE_SYSCALL_DEBUG>("Core::safe_syscall: {} ({}: {})", sysret, saved_errno, strerror(saved_errno)); } if (errno == EINTR) diff --git a/Userland/Libraries/LibCrypto/Authentication/GHash.cpp b/Userland/Libraries/LibCrypto/Authentication/GHash.cpp index 9ac2247a77..1b6794bb3c 100644 --- a/Userland/Libraries/LibCrypto/Authentication/GHash.cpp +++ b/Userland/Libraries/LibCrypto/Authentication/GHash.cpp @@ -89,7 +89,7 @@ GHash::TagType GHash::process(ReadonlyBytes aad, ReadonlyBytes cipher) auto high = [](u64 value) -> u32 { return value >> 32; }; auto low = [](u64 value) -> u32 { return value & 0xffffffff; }; - if constexpr (debug_ghash_process) { + if constexpr (GHASH_PROCESS_DEBUG) { dbgln("AAD bits: {} : {}", high(aad_bits), low(aad_bits)); dbgln("Cipher bits: {} : {}", high(cipher_bits), low(cipher_bits)); dbgln("Tag bits: {} : {} : {} : {}", tag[0], tag[1], tag[2], tag[3]); @@ -100,7 +100,7 @@ GHash::TagType GHash::process(ReadonlyBytes aad, ReadonlyBytes cipher) tag[2] ^= high(cipher_bits); tag[3] ^= low(cipher_bits); - dbgln<debug_ghash_process>("Tag bits: {} : {} : {} : {}", tag[0], tag[1], tag[2], tag[3]); + dbgln<GHASH_PROCESS_DEBUG>("Tag bits: {} : {} : {} : {}", tag[0], tag[1], tag[2], tag[3]); galois_multiply(tag, m_key, tag); diff --git a/Userland/Libraries/LibCrypto/NumberTheory/ModularFunctions.cpp b/Userland/Libraries/LibCrypto/NumberTheory/ModularFunctions.cpp index c939bfb6c5..e9a29c46f1 100644 --- a/Userland/Libraries/LibCrypto/NumberTheory/ModularFunctions.cpp +++ b/Userland/Libraries/LibCrypto/NumberTheory/ModularFunctions.cpp @@ -231,7 +231,7 @@ UnsignedBigInteger LCM(const UnsignedBigInteger& a, const UnsignedBigInteger& b) UnsignedBigInteger::divide_without_allocation(a, gcd_output, temp_1, temp_2, temp_3, temp_4, temp_quotient, temp_remainder); UnsignedBigInteger::multiply_without_allocation(temp_quotient, b, temp_1, temp_2, temp_3, temp_4, output); - dbgln<debug_nt>("quot: {} rem: {} out: {}", temp_quotient, temp_remainder, output); + dbgln<NT_DEBUG>("quot: {} rem: {} out: {}", temp_quotient, temp_remainder, output); return output; } diff --git a/Userland/Libraries/LibCrypto/PK/RSA.cpp b/Userland/Libraries/LibCrypto/PK/RSA.cpp index d6e2320bdd..1dc8e1e62e 100644 --- a/Userland/Libraries/LibCrypto/PK/RSA.cpp +++ b/Userland/Libraries/LibCrypto/PK/RSA.cpp @@ 
-116,7 +116,7 @@ RSA::KeyPairType RSA::parse_rsa_key(ReadonlyBytes in) void RSA::encrypt(ReadonlyBytes in, Bytes& out) { - dbgln<debug_crypto>("in size: {}", in.size()); + dbgln<CRYPTO_DEBUG>("in size: {}", in.size()); auto in_integer = UnsignedBigInteger::import_data(in.data(), in.size()); if (!(in_integer < m_public_key.modulus())) { dbgln("value too large for key"); @@ -230,7 +230,7 @@ VerificationConsistency RSA_EMSA_PSS<HashFunction>::verify(ReadonlyBytes in) void RSA_PKCS1_EME::encrypt(ReadonlyBytes in, Bytes& out) { auto mod_len = (m_public_key.modulus().trimmed_length() * sizeof(u32) * 8 + 7) / 8; - dbgln<debug_crypto>("key size: {}", mod_len); + dbgln<CRYPTO_DEBUG>("key size: {}", mod_len); if (in.size() > mod_len - 11) { dbgln("message too long :("); out = out.trim(0); @@ -262,7 +262,7 @@ void RSA_PKCS1_EME::encrypt(ReadonlyBytes in, Bytes& out) out.overwrite(3 + ps_length, in.data(), in.size()); out = out.trim(3 + ps_length + in.size()); // should be a single block - dbgln<debug_crypto>("padded output size: {} buffer size: {}", 3 + ps_length + in.size(), out.size()); + dbgln<CRYPTO_DEBUG>("padded output size: {} buffer size: {}", 3 + ps_length + in.size(), out.size()); RSA::encrypt(out, out); } diff --git a/Userland/Libraries/LibDebug/Dwarf/LineProgram.cpp b/Userland/Libraries/LibDebug/Dwarf/LineProgram.cpp index 9dab951d2f..9405d9f6cf 100644 --- a/Userland/Libraries/LibDebug/Dwarf/LineProgram.cpp +++ b/Userland/Libraries/LibDebug/Dwarf/LineProgram.cpp @@ -235,7 +235,7 @@ void LineProgram::handle_sepcial_opcode(u8 opcode) m_address += address_increment; m_line += line_increment; - if constexpr (debug_dwarf) { + if constexpr (DWARF_DEBUG) { dbgln("Special adjusted_opcode: {}, address_increment: {}, line_increment: {}", adjusted_opcode, address_increment, line_increment); dbgln("Address is now: {:p}, and line is: {}:{}", m_address, m_source_files[m_file_index].name, m_line); } @@ -251,7 +251,7 @@ void LineProgram::run_program() u8 opcode = 0; m_stream >> opcode; - dbgln<debug_dwarf>("{:p}: opcode: {}", m_stream.offset() - 1, opcode); + dbgln<DWARF_DEBUG>("{:p}: opcode: {}", m_stream.offset() - 1, opcode); if (opcode == 0) { handle_extended_opcode(); diff --git a/Userland/Libraries/LibDiff/Hunks.cpp b/Userland/Libraries/LibDiff/Hunks.cpp index 2d9c77405a..d94bedfa03 100644 --- a/Userland/Libraries/LibDiff/Hunks.cpp +++ b/Userland/Libraries/LibDiff/Hunks.cpp @@ -77,7 +77,7 @@ Vector<Hunk> parse_hunks(const String& diff) hunks.append(hunk); } - if constexpr (debug_hunks) { + if constexpr (HUNKS_DEBUG) { for (const auto& hunk : hunks) { dbgln("Hunk location:"); dbgln(" orig: {}", hunk.original_start_line); diff --git a/Userland/Libraries/LibGUI/CppSyntaxHighlighter.cpp b/Userland/Libraries/LibGUI/CppSyntaxHighlighter.cpp index a4c6fd1205..f0e0171f2e 100644 --- a/Userland/Libraries/LibGUI/CppSyntaxHighlighter.cpp +++ b/Userland/Libraries/LibGUI/CppSyntaxHighlighter.cpp @@ -84,7 +84,7 @@ void CppSyntaxHighlighter::rehighlight(Gfx::Palette palette) Vector<GUI::TextDocumentSpan> spans; for (auto& token : tokens) { - dbgln<debug_syntax_highlighting>("{} @ {}:{} - {}:{}", token.to_string(), token.m_start.line, token.m_start.column, token.m_end.line, token.m_end.column); + dbgln<SYNTAX_HIGHLIGHTING_DEBUG>("{} @ {}:{} - {}:{}", token.to_string(), token.m_start.line, token.m_start.column, token.m_end.line, token.m_end.column); GUI::TextDocumentSpan span; span.range.set_start({ token.m_start.line, token.m_start.column }); span.range.set_end({ token.m_end.line, token.m_end.column }); diff --git 
a/Userland/Libraries/LibGUI/JSSyntaxHighlighter.cpp b/Userland/Libraries/LibGUI/JSSyntaxHighlighter.cpp index 2f70a59098..267b572cd6 100644 --- a/Userland/Libraries/LibGUI/JSSyntaxHighlighter.cpp +++ b/Userland/Libraries/LibGUI/JSSyntaxHighlighter.cpp @@ -108,7 +108,7 @@ void JSSyntaxHighlighter::rehighlight(Gfx::Palette palette) spans.append(span); advance_position(str[str.length() - 1]); - dbgln<debug_syntax_highlighting>("{}{} @ '{}' {}:{} - {}:{}", + dbgln<SYNTAX_HIGHLIGHTING_DEBUG>("{}{} @ '{}' {}:{} - {}:{}", token.name(), is_trivia ? " (trivia)" : "", token.value(), diff --git a/Userland/Libraries/LibGUI/WindowServerConnection.cpp b/Userland/Libraries/LibGUI/WindowServerConnection.cpp index 279f7a1a89..73e2715f28 100644 --- a/Userland/Libraries/LibGUI/WindowServerConnection.cpp +++ b/Userland/Libraries/LibGUI/WindowServerConnection.cpp @@ -141,25 +141,25 @@ void WindowServerConnection::handle(const Messages::WindowClient::KeyDown& messa auto key_event = make<KeyEvent>(Event::KeyDown, (KeyCode)message.key(), message.modifiers(), message.code_point(), message.scancode()); Action* action = nullptr; - dbgln<debug_keyboard_shortcuts>("Looking up action for {}", key_event->to_string()); + dbgln<KEYBOARD_SHORTCUTS_DEBUG>("Looking up action for {}", key_event->to_string()); if (auto* focused_widget = window->focused_widget()) { for (auto* widget = focused_widget; widget && !action; widget = widget->parent_widget()) { action = widget->action_for_key_event(*key_event); - dbgln<debug_keyboard_shortcuts>(" > Focused widget {} gave action: {}", *widget, action); + dbgln<KEYBOARD_SHORTCUTS_DEBUG>(" > Focused widget {} gave action: {}", *widget, action); } } if (!action) { action = window->action_for_key_event(*key_event); - dbgln<debug_keyboard_shortcuts>(" > Asked window {}, got action: {}", *window, action); + dbgln<KEYBOARD_SHORTCUTS_DEBUG>(" > Asked window {}, got action: {}", *window, action); } // NOTE: Application-global shortcuts are ignored while a modal window is up. if (!action && !window->is_modal()) { action = Application::the()->action_for_key_event(*key_event); - dbgln<debug_keyboard_shortcuts>(" > Asked application, got action: {}", action); + dbgln<KEYBOARD_SHORTCUTS_DEBUG>(" > Asked application, got action: {}", action); } if (action) { diff --git a/Userland/Libraries/LibGemini/Job.cpp b/Userland/Libraries/LibGemini/Job.cpp index 5cad2399f9..5175dd100d 100644 --- a/Userland/Libraries/LibGemini/Job.cpp +++ b/Userland/Libraries/LibGemini/Job.cpp @@ -67,7 +67,7 @@ void Job::on_socket_connected() m_sent_data = true; auto raw_request = m_request.to_raw_request(); - if constexpr (debug_job) { + if constexpr (JOB_DEBUG) { dbgln("Job: raw_request:"); dbgln("{}", String::copy(raw_request)); } diff --git a/Userland/Libraries/LibGfx/BMPLoader.cpp b/Userland/Libraries/LibGfx/BMPLoader.cpp index b84bab5c0a..7954142800 100644 --- a/Userland/Libraries/LibGfx/BMPLoader.cpp +++ b/Userland/Libraries/LibGfx/BMPLoader.cpp @@ -317,7 +317,7 @@ static u8 get_scaled_color(u32 data, u8 mask_size, i8 mask_shift) // to scale the values in order to reach the proper value of 255. 
static u32 int_to_scaled_rgb(BMPLoadingContext& context, u32 data) { - dbgln<debug_bmp>("DIB info sizes before access: #masks={}, #mask_sizes={}, #mask_shifts={}", + dbgln<BMP_DEBUG>("DIB info sizes before access: #masks={}, #mask_sizes={}, #mask_shifts={}", context.dib.info.masks.size(), context.dib.info.mask_sizes.size(), context.dib.info.mask_shifts.size()); @@ -465,7 +465,7 @@ static bool decode_bmp_header(BMPLoadingContext& context) return true; if (!context.file_bytes || context.file_size < bmp_header_size) { - dbgln<debug_bmp>("Missing BMP header"); + dbgln<BMP_DEBUG>("Missing BMP header"); context.state = BMPLoadingContext::State::Error; return false; } @@ -474,7 +474,7 @@ static bool decode_bmp_header(BMPLoadingContext& context) u16 header = streamer.read_u16(); if (header != 0x4d42) { - dbgln<debug_bmp>("BMP has invalid magic header number: {:#04x}", header); + dbgln<BMP_DEBUG>("BMP has invalid magic header number: {:#04x}", header); context.state = BMPLoadingContext::State::Error; return false; } @@ -490,13 +490,13 @@ static bool decode_bmp_header(BMPLoadingContext& context) streamer.drop_bytes(4); context.data_offset = streamer.read_u32(); - if constexpr (debug_bmp) { + if constexpr (BMP_DEBUG) { dbgln("BMP file size: {}", context.file_size); dbgln("BMP data offset: {}", context.data_offset); } if (context.data_offset >= context.file_size) { - dbgln<debug_bmp>("BMP data offset is beyond file end?!"); + dbgln<BMP_DEBUG>("BMP data offset is beyond file end?!"); return false; } @@ -549,7 +549,7 @@ static bool decode_bmp_core_dib(BMPLoadingContext& context, Streamer& streamer) return false; } - if constexpr (debug_bmp) { + if constexpr (BMP_DEBUG) { dbgln("BMP width: {}", core.width); dbgln("BMP height: {}", core.height); dbgln("BMP bits_per_pixel: {}", core.bpp); @@ -598,7 +598,7 @@ static bool decode_bmp_osv2_dib(BMPLoadingContext& context, Streamer& streamer, return false; } - if constexpr (debug_bmp) { + if constexpr (BMP_DEBUG) { dbgln("BMP width: {}", core.width); dbgln("BMP height: {}", core.height); dbgln("BMP bits_per_pixel: {}", core.bpp); @@ -638,7 +638,7 @@ static bool decode_bmp_osv2_dib(BMPLoadingContext& context, Streamer& streamer, // ColorEncoding (4) + Identifier (4) streamer.drop_bytes(8); - if constexpr (debug_bmp) { + if constexpr (BMP_DEBUG) { dbgln("BMP compression: {}", info.compression); dbgln("BMP image size: {}", info.image_size); dbgln("BMP horizontal res: {}", info.horizontal_resolution); @@ -678,7 +678,7 @@ static bool decode_bmp_info_dib(BMPLoadingContext& context, Streamer& streamer) if (info.number_of_important_palette_colors == 0) info.number_of_important_palette_colors = info.number_of_palette_colors; - if constexpr (debug_bmp) { + if constexpr (BMP_DEBUG) { dbgln("BMP compression: {}", info.compression); dbgln("BMP image size: {}", info.image_size); dbgln("BMP horizontal res: {}", info.horizontal_resolution); @@ -699,7 +699,7 @@ static bool decode_bmp_v2_dib(BMPLoadingContext& context, Streamer& streamer) context.dib.info.masks.append(streamer.read_u32()); context.dib.info.masks.append(streamer.read_u32()); - if constexpr (debug_bmp) { + if constexpr (BMP_DEBUG) { dbgln("BMP red mask: {:#08x}", context.dib.info.masks[0]); dbgln("BMP green mask: {:#08x}", context.dib.info.masks[1]); dbgln("BMP blue mask: {:#08x}", context.dib.info.masks[2]); @@ -719,12 +719,12 @@ static bool decode_bmp_v3_dib(BMPLoadingContext& context, Streamer& streamer) // suite results. 
if (context.dib.info.compression == Compression::ALPHABITFIELDS) { context.dib.info.masks.append(streamer.read_u32()); - dbgln<debug_bmp>("BMP alpha mask: {:#08x}", context.dib.info.masks[3]); + dbgln<BMP_DEBUG>("BMP alpha mask: {:#08x}", context.dib.info.masks[3]); } else if (context.dib_size() >= 56 && context.dib.core.bpp >= 16) { auto mask = streamer.read_u32(); if ((context.dib.core.bpp == 32 && mask != 0) || context.dib.core.bpp == 16) { context.dib.info.masks.append(mask); - dbgln<debug_bmp>("BMP alpha mask: {:#08x}", mask); + dbgln<BMP_DEBUG>("BMP alpha mask: {:#08x}", mask); } } else { streamer.drop_bytes(4); @@ -745,7 +745,7 @@ static bool decode_bmp_v4_dib(BMPLoadingContext& context, Streamer& streamer) v4.blue_endpoint = { streamer.read_i32(), streamer.read_i32(), streamer.read_i32() }; v4.gamma_endpoint = { streamer.read_u32(), streamer.read_u32(), streamer.read_u32() }; - if constexpr (debug_bmp) { + if constexpr (BMP_DEBUG) { dbgln("BMP color space: {}", v4.color_space); dbgln("BMP red endpoint: {}", v4.red_endpoint); dbgln("BMP green endpoint: {}", v4.green_endpoint); @@ -766,7 +766,7 @@ static bool decode_bmp_v5_dib(BMPLoadingContext& context, Streamer& streamer) v5.profile_data = streamer.read_u32(); v5.profile_size = streamer.read_u32(); - if constexpr (debug_bmp) { + if constexpr (BMP_DEBUG) { dbgln("BMP intent: {}", v5.intent); dbgln("BMP profile data: {}", v5.profile_data); dbgln("BMP profile size: {}", v5.profile_size); @@ -801,7 +801,7 @@ static bool decode_bmp_dib(BMPLoadingContext& context) streamer = Streamer(context.file_bytes + bmp_header_size + 4, context.data_offset - bmp_header_size - 4); - dbgln<debug_bmp>("BMP dib size: {}", dib_size); + dbgln<BMP_DEBUG>("BMP dib size: {}", dib_size); bool error = false; @@ -931,7 +931,7 @@ static bool uncompress_bmp_rle_data(BMPLoadingContext& context, ByteBuffer& buff { // RLE-compressed images cannot be stored top-down if (context.dib.core.height < 0) { - dbgln<debug_bmp>("BMP is top-down and RLE compressed"); + dbgln<BMP_DEBUG>("BMP is top-down and RLE compressed"); context.state = BMPLoadingContext::State::Error; return false; } diff --git a/Userland/Libraries/LibGfx/GIFLoader.cpp b/Userland/Libraries/LibGfx/GIFLoader.cpp index 0418244d93..76d2b0adcd 100644 --- a/Userland/Libraries/LibGfx/GIFLoader.cpp +++ b/Userland/Libraries/LibGfx/GIFLoader.cpp @@ -212,13 +212,13 @@ public: } if (m_current_code > m_code_table.size()) { - dbgln<debug_gif>("Corrupted LZW stream, invalid code: {} at bit index {}, code table size: {}", + dbgln<GIF_DEBUG>("Corrupted LZW stream, invalid code: {} at bit index {}, code table size: {}", m_current_code, m_current_bit_index, m_code_table.size()); return {}; } else if (m_current_code == m_code_table.size() && m_output.is_empty()) { - dbgln<debug_gif>("Corrupted LZW stream, valid new code but output buffer is empty: {} at bit index {}, code table size: {}", + dbgln<GIF_DEBUG>("Corrupted LZW stream, valid new code but output buffer is empty: {} at bit index {}, code table size: {}", m_current_code, m_current_bit_index, m_code_table.size()); @@ -527,12 +527,12 @@ static bool load_gif_frame_descriptors(GIFLoadingContext& context) if (extension_type == 0xFF) { if (sub_block.size() != 14) { - dbgln<debug_gif>("Unexpected application extension size: {}", sub_block.size()); + dbgln<GIF_DEBUG>("Unexpected application extension size: {}", sub_block.size()); continue; } if (sub_block[11] != 1) { - dbgln<debug_gif>("Unexpected application extension format"); + dbgln<GIF_DEBUG>("Unexpected application 
extension format"); continue; } diff --git a/Userland/Libraries/LibGfx/JPGLoader.cpp b/Userland/Libraries/LibGfx/JPGLoader.cpp index 33f03daeeb..8de60841db 100644 --- a/Userland/Libraries/LibGfx/JPGLoader.cpp +++ b/Userland/Libraries/LibGfx/JPGLoader.cpp @@ -230,13 +230,13 @@ static void generate_huffman_codes(HuffmanTableSpec& table) static Optional<size_t> read_huffman_bits(HuffmanStreamState& hstream, size_t count = 1) { if (count > (8 * sizeof(size_t))) { - dbgln<debug_jpg>("Can't read {} bits at once!", count); + dbgln<JPG_DEBUG>("Can't read {} bits at once!", count); return {}; } size_t value = 0; while (count--) { if (hstream.byte_offset >= hstream.stream.size()) { - dbgln<debug_jpg>("Huffman stream exhausted. This could be an error!"); + dbgln<JPG_DEBUG>("Huffman stream exhausted. This could be an error!"); return {}; } u8 current_byte = hstream.stream[hstream.byte_offset]; @@ -313,7 +313,7 @@ static bool build_macroblocks(JPGLoadingContext& context, Vector<Macroblock>& ma // For DC coefficients, symbol encodes the length of the coefficient. auto dc_length = symbol_or_error.release_value(); if (dc_length > 11) { - dbgln<debug_jpg>("DC coefficient too long: {}!", dc_length); + dbgln<JPG_DEBUG>("DC coefficient too long: {}!", dc_length); return false; } @@ -350,13 +350,13 @@ static bool build_macroblocks(JPGLoadingContext& context, Vector<Macroblock>& ma j += run_length; if (j >= 64) { - dbgln<debug_jpg>("Run-length exceeded boundaries. Cursor: {}, Skipping: {}!", j, run_length); + dbgln<JPG_DEBUG>("Run-length exceeded boundaries. Cursor: {}, Skipping: {}!", j, run_length); return false; } u8 coeff_length = ac_symbol & 0x0F; if (coeff_length > 10) { - dbgln<debug_jpg>("AC coefficient too long: {}!", coeff_length); + dbgln<JPG_DEBUG>("AC coefficient too long: {}!", coeff_length); return false; } @@ -383,7 +383,7 @@ static Optional<Vector<Macroblock>> decode_huffman_stream(JPGLoadingContext& con Vector<Macroblock> macroblocks; macroblocks.resize(context.mblock_meta.padded_total); - if constexpr (debug_jpg) { + if constexpr (JPG_DEBUG) { dbgln("Image width: {}", context.frame.width); dbgln("Image height: {}", context.frame.height); dbgln("Macroblocks in a row: {}", context.mblock_meta.hpadded_count); @@ -422,7 +422,7 @@ static Optional<Vector<Macroblock>> decode_huffman_stream(JPGLoadingContext& con } if (!build_macroblocks(context, macroblocks, hcursor, vcursor)) { - if constexpr (debug_jpg) { + if constexpr (JPG_DEBUG) { dbgln("Failed to build Macroblock {}", i); dbgln("Huffman stream byte offset {}", context.huffman_stream.byte_offset); dbgln("Huffman stream bit offset {}", context.huffman_stream.bit_offset); @@ -445,7 +445,7 @@ static inline bool is_valid_marker(const Marker marker) if (marker >= JPG_APPN0 && marker <= JPG_APPNF) { if (marker != JPG_APPN0) - dbgln<debug_jpg>("{:#04x} not supported yet. The decoder may fail!", marker); + dbgln<JPG_DEBUG>("{:#04x} not supported yet. The decoder may fail!", marker); return true; } if (marker >= JPG_RESERVED1 && marker <= JPG_RESERVEDD) @@ -467,7 +467,7 @@ static inline bool is_valid_marker(const Marker marker) if (marker >= 0xFFC0 && marker <= 0xFFCF) { if (marker != 0xFFC4 && marker != 0xFFC8 && marker != 0xFFCC) { - dbgln<debug_jpg>("Decoding this frame-type (SOF{}) is not currently supported. Decoder will fail!", marker & 0xf); + dbgln<JPG_DEBUG>("Decoding this frame-type (SOF{}) is not currently supported. 
Decoder will fail!", marker & 0xf); return false; } } @@ -504,7 +504,7 @@ static inline Marker read_marker_at_cursor(InputMemoryStream& stream) static bool read_start_of_scan(InputMemoryStream& stream, JPGLoadingContext& context) { if (context.state < JPGLoadingContext::State::FrameDecoded) { - dbgln<debug_jpg>("{}: SOS found before reading a SOF!", stream.offset()); + dbgln<JPG_DEBUG>("{}: SOS found before reading a SOF!", stream.offset()); return false; } @@ -519,7 +519,7 @@ static bool read_start_of_scan(InputMemoryStream& stream, JPGLoadingContext& con if (stream.handle_any_error()) return false; if (component_count != context.component_count) { - dbgln<debug_jpg>("{}: Unsupported number of components: {}!", stream.offset(), component_count); + dbgln<JPG_DEBUG>("{}: Unsupported number of components: {}!", stream.offset(), component_count); return false; } @@ -538,7 +538,7 @@ static bool read_start_of_scan(InputMemoryStream& stream, JPGLoadingContext& con return false; } } else { - dbgln<debug_jpg>("{}: Unsupported component id: {}!", stream.offset(), component_id); + dbgln<JPG_DEBUG>("{}: Unsupported component id: {}!", stream.offset(), component_id); return false; } @@ -551,17 +551,17 @@ static bool read_start_of_scan(InputMemoryStream& stream, JPGLoadingContext& con component->ac_destination_id = table_ids & 0x0F; if (context.dc_tables.size() != context.ac_tables.size()) { - dbgln<debug_jpg>("{}: DC & AC table count mismatch!", stream.offset()); + dbgln<JPG_DEBUG>("{}: DC & AC table count mismatch!", stream.offset()); return false; } if (!context.dc_tables.contains(component->dc_destination_id)) { - dbgln<debug_jpg>("DC table (id: {}) does not exist!", component->dc_destination_id); + dbgln<JPG_DEBUG>("DC table (id: {}) does not exist!", component->dc_destination_id); return false; } if (!context.ac_tables.contains(component->ac_destination_id)) { - dbgln<debug_jpg>("AC table (id: {}) does not exist!", component->ac_destination_id); + dbgln<JPG_DEBUG>("AC table (id: {}) does not exist!", component->ac_destination_id); return false; } } @@ -580,7 +580,7 @@ static bool read_start_of_scan(InputMemoryStream& stream, JPGLoadingContext& con return false; // The three values should be fixed for baseline JPEGs utilizing sequential DCT. if (spectral_selection_start != 0 || spectral_selection_end != 63 || successive_approximation != 0) { - dbgln<debug_jpg>("{}: ERROR! Start of Selection: {}, End of Selection: {}, Successive Approximation: {}!", + dbgln<JPG_DEBUG>("{}: ERROR! 
Start of Selection: {}, End of Selection: {}, Successive Approximation: {}!", stream.offset(), spectral_selection_start, spectral_selection_end, @@ -597,7 +597,7 @@ static bool read_reset_marker(InputMemoryStream& stream, JPGLoadingContext& cont return false; bytes_to_read -= 2; if (bytes_to_read != 2) { - dbgln<debug_jpg>("{}: Malformed reset marker found!", stream.offset()); + dbgln<JPG_DEBUG>("{}: Malformed reset marker found!", stream.offset()); return false; } context.dc_reset_interval = read_be_word(stream); @@ -623,11 +623,11 @@ static bool read_huffman_table(InputMemoryStream& stream, JPGLoadingContext& con u8 table_type = table_info >> 4; u8 table_destination_id = table_info & 0x0F; if (table_type > 1) { - dbgln<debug_jpg>("{}: Unrecognized huffman table: {}!", stream.offset(), table_type); + dbgln<JPG_DEBUG>("{}: Unrecognized huffman table: {}!", stream.offset(), table_type); return false; } if (table_destination_id > 1) { - dbgln<debug_jpg>("{}: Invalid huffman table destination id: {}!", stream.offset(), table_destination_id); + dbgln<JPG_DEBUG>("{}: Invalid huffman table destination id: {}!", stream.offset(), table_destination_id); return false; } @@ -667,7 +667,7 @@ static bool read_huffman_table(InputMemoryStream& stream, JPGLoadingContext& con } if (bytes_to_read != 0) { - dbgln<debug_jpg>("{}: Extra bytes detected in huffman header!", stream.offset()); + dbgln<JPG_DEBUG>("{}: Extra bytes detected in huffman header!", stream.offset()); return false; } return true; @@ -683,7 +683,7 @@ static inline bool validate_luma_and_modify_context(const ComponentSpec& luma, J context.hsample_factor = luma.hsample_factor; context.vsample_factor = luma.vsample_factor; - if constexpr (debug_jpg) { + if constexpr (JPG_DEBUG) { dbgln("Horizontal Subsampling Factor: {}", luma.hsample_factor); dbgln("Vertical Subsampling Factor: {}", luma.vsample_factor); } @@ -705,7 +705,7 @@ static inline void set_macroblock_metadata(JPGLoadingContext& context) static bool read_start_of_frame(InputMemoryStream& stream, JPGLoadingContext& context) { if (context.state == JPGLoadingContext::FrameDecoded) { - dbgln<debug_jpg>("{}: SOF repeated!", stream.offset()); + dbgln<JPG_DEBUG>("{}: SOF repeated!", stream.offset()); return false; } @@ -721,7 +721,7 @@ static bool read_start_of_frame(InputMemoryStream& stream, JPGLoadingContext& co if (stream.handle_any_error()) return false; if (context.frame.precision != 8) { - dbgln<debug_jpg>("{}: SOF precision != 8!", stream.offset()); + dbgln<JPG_DEBUG>("{}: SOF precision != 8!", stream.offset()); return false; } @@ -732,7 +732,7 @@ static bool read_start_of_frame(InputMemoryStream& stream, JPGLoadingContext& co if (stream.handle_any_error()) return false; if (!context.frame.width || !context.frame.height) { - dbgln<debug_jpg>("{}: ERROR! Image height: {}, Image width: {}!", stream.offset(), context.frame.height, context.frame.width); + dbgln<JPG_DEBUG>("{}: ERROR! 
Image height: {}, Image width: {}!", stream.offset(), context.frame.height, context.frame.width); return false; } @@ -747,7 +747,7 @@ static bool read_start_of_frame(InputMemoryStream& stream, JPGLoadingContext& co if (stream.handle_any_error()) return false; if (context.component_count != 1 && context.component_count != 3) { - dbgln<debug_jpg>("{}: Unsupported number of components in SOF: {}!", stream.offset(), context.component_count); + dbgln<JPG_DEBUG>("{}: Unsupported number of components in SOF: {}!", stream.offset(), context.component_count); return false; } @@ -770,7 +770,7 @@ static bool read_start_of_frame(InputMemoryStream& stream, JPGLoadingContext& co // By convention, downsampling is applied only on chroma components. So we should // hope to see the maximum sampling factor in the luma component. if (!validate_luma_and_modify_context(component, context)) { - dbgln<debug_jpg>("{}: Unsupported luma subsampling factors: horizontal: {}, vertical: {}", + dbgln<JPG_DEBUG>("{}: Unsupported luma subsampling factors: horizontal: {}, vertical: {}", stream.offset(), component.hsample_factor, component.vsample_factor); @@ -778,7 +778,7 @@ static bool read_start_of_frame(InputMemoryStream& stream, JPGLoadingContext& co } } else { if (component.hsample_factor != 1 || component.vsample_factor != 1) { - dbgln<debug_jpg>("{}: Unsupported chroma subsampling factors: horizontal: {}, vertical: {}", + dbgln<JPG_DEBUG>("{}: Unsupported chroma subsampling factors: horizontal: {}, vertical: {}", stream.offset(), component.hsample_factor, component.vsample_factor); @@ -790,7 +790,7 @@ static bool read_start_of_frame(InputMemoryStream& stream, JPGLoadingContext& co if (stream.handle_any_error()) return false; if (component.qtable_id > 1) { - dbgln<debug_jpg>("{}: Unsupported quantization table id: {}!", stream.offset(), component.qtable_id); + dbgln<JPG_DEBUG>("{}: Unsupported quantization table id: {}!", stream.offset(), component.qtable_id); return false; } @@ -815,12 +815,12 @@ static bool read_quantization_table(InputMemoryStream& stream, JPGLoadingContext return false; u8 element_unit_hint = info_byte >> 4; if (element_unit_hint > 1) { - dbgln<debug_jpg>("{}: Unsupported unit hint in quantization table: {}!", stream.offset(), element_unit_hint); + dbgln<JPG_DEBUG>("{}: Unsupported unit hint in quantization table: {}!", stream.offset(), element_unit_hint); return false; } u8 table_id = info_byte & 0x0F; if (table_id > 1) { - dbgln<debug_jpg>("{}: Unsupported quantization table id: {}!", stream.offset(), table_id); + dbgln<JPG_DEBUG>("{}: Unsupported quantization table id: {}!", stream.offset(), table_id); return false; } u32* table = table_id == 0 ? context.luma_table : context.chroma_table; @@ -843,7 +843,7 @@ static bool read_quantization_table(InputMemoryStream& stream, JPGLoadingContext bytes_to_read -= 1 + (element_unit_hint == 0 ? 
64 : 128); } if (bytes_to_read != 0) { - dbgln<debug_jpg>("{}: Invalid length for one or more quantization tables!", stream.offset()); + dbgln<JPG_DEBUG>("{}: Invalid length for one or more quantization tables!", stream.offset()); return false; } @@ -1109,7 +1109,7 @@ static bool parse_header(InputMemoryStream& stream, JPGLoadingContext& context) if (stream.handle_any_error()) return false; if (marker != JPG_SOI) { - dbgln<debug_jpg>("{}: SOI not found: {:x}!", stream.offset(), marker); + dbgln<JPG_DEBUG>("{}: SOI not found: {:x}!", stream.offset(), marker); return false; } for (;;) { @@ -1137,7 +1137,7 @@ static bool parse_header(InputMemoryStream& stream, JPGLoadingContext& context) case JPG_RST7: case JPG_SOI: case JPG_EOI: - dbgln<debug_jpg>("{}: Unexpected marker {:x}!", stream.offset(), marker); + dbgln<JPG_DEBUG>("{}: Unexpected marker {:x}!", stream.offset(), marker); return false; case JPG_SOF0: if (!read_start_of_frame(stream, context)) @@ -1160,7 +1160,7 @@ static bool parse_header(InputMemoryStream& stream, JPGLoadingContext& context) return read_start_of_scan(stream, context); default: if (!skip_marker_with_length(stream)) { - dbgln<debug_jpg>("{}: Error skipping marker: {:x}!", stream.offset(), marker); + dbgln<JPG_DEBUG>("{}: Error skipping marker: {:x}!", stream.offset(), marker); return false; } break; @@ -1182,7 +1182,7 @@ static bool scan_huffman_stream(InputMemoryStream& stream, JPGLoadingContext& co last_byte = current_byte; stream >> current_byte; if (stream.handle_any_error()) { - dbgln<debug_jpg>("{}: EOI not found!", stream.offset()); + dbgln<JPG_DEBUG>("{}: EOI not found!", stream.offset()); return false; } @@ -1206,7 +1206,7 @@ static bool scan_huffman_stream(InputMemoryStream& stream, JPGLoadingContext& co return false; continue; } - dbgln<debug_jpg>("{}: Invalid marker: {:x}!", stream.offset(), marker); + dbgln<JPG_DEBUG>("{}: Invalid marker: {:x}!", stream.offset(), marker); return false; } else { context.huffman_stream.stream.append(last_byte); @@ -1227,7 +1227,7 @@ static bool decode_jpg(JPGLoadingContext& context) auto result = decode_huffman_stream(context); if (!result.has_value()) { - dbgln<debug_jpg>("{}: Failed to decode Macroblocks!", stream.offset()); + dbgln<JPG_DEBUG>("{}: Failed to decode Macroblocks!", stream.offset()); return false; } diff --git a/Userland/Libraries/LibGfx/PNGLoader.cpp b/Userland/Libraries/LibGfx/PNGLoader.cpp index 868cf1f0ab..904f51e109 100644 --- a/Userland/Libraries/LibGfx/PNGLoader.cpp +++ b/Userland/Libraries/LibGfx/PNGLoader.cpp @@ -613,7 +613,7 @@ static bool decode_png_bitmap_simple(PNGLoadingContext& context) } if (filter > 4) { - dbgln<debug_png>("Invalid PNG filter: {}", filter); + dbgln<PNG_DEBUG>("Invalid PNG filter: {}", filter); context.state = PNGLoadingContext::State::Error; return false; } @@ -715,7 +715,7 @@ static bool decode_adam7_pass(PNGLoadingContext& context, Streamer& streamer, in } if (filter > 4) { - dbgln<debug_png>("Invalid PNG filter: {}", filter); + dbgln<PNG_DEBUG>("Invalid PNG filter: {}", filter); context.state = PNGLoadingContext::State::Error; return false; } diff --git a/Userland/Libraries/LibGfx/Painter.cpp b/Userland/Libraries/LibGfx/Painter.cpp index b38eb1b4eb..e1fe64c78e 100644 --- a/Userland/Libraries/LibGfx/Painter.cpp +++ b/Userland/Libraries/LibGfx/Painter.cpp @@ -925,7 +925,7 @@ void Painter::draw_glyph_or_emoji(const IntPoint& point, u32 code_point, const F // Perhaps it's an emoji? 
auto* emoji = Emoji::emoji_for_code_point(code_point); if (emoji == nullptr) { - dbgln<debug_emoji>("Failed to find an emoji for code_point {}", code_point); + dbgln<EMOJI_DEBUG>("Failed to find an emoji for code_point {}", code_point); draw_glyph(point, '?', font, color); return; } @@ -1639,7 +1639,7 @@ void Painter::fill_path(Path& path, Color color, WindingRule winding_rule) // The points between this segment and the previous are // inside the shape - dbgln<debug_fill_path>("y={}: {} at {}: {} -- {}", scanline, winding_number, i, from, to); + dbgln<FILL_PATH_DEBUG>("y={}: {} at {}: {} -- {}", scanline, winding_number, i, from, to); draw_line(from, to, color, 1); } diff --git a/Userland/Libraries/LibGfx/PortableImageLoaderCommon.h b/Userland/Libraries/LibGfx/PortableImageLoaderCommon.h index 4417d053a7..dc6f3b3826 100644 --- a/Userland/Libraries/LibGfx/PortableImageLoaderCommon.h +++ b/Userland/Libraries/LibGfx/PortableImageLoaderCommon.h @@ -104,14 +104,14 @@ static bool read_magic_number(TContext& context, Streamer& streamer) if (!context.data || context.data_size < 2) { context.state = TContext::State::Error; - dbgln<debug_portable_image_loader>("There is no enough data for {}", TContext::image_type); + dbgln<PORTABLE_IMAGE_LOADER_DEBUG>("There is no enough data for {}", TContext::image_type); return false; } u8 magic_number[2] {}; if (!streamer.read_bytes(magic_number, 2)) { context.state = TContext::State::Error; - dbgln<debug_portable_image_loader>("We can't read magic number for {}", TContext::image_type); + dbgln<PORTABLE_IMAGE_LOADER_DEBUG>("We can't read magic number for {}", TContext::image_type); return false; } @@ -128,7 +128,7 @@ static bool read_magic_number(TContext& context, Streamer& streamer) } context.state = TContext::State::Error; - dbgln<debug_portable_image_loader>("Magic number is not valid for {}{}{}", magic_number[0], magic_number[1], TContext::image_type); + dbgln<PORTABLE_IMAGE_LOADER_DEBUG>("Magic number is not valid for {}{}{}", magic_number[0], magic_number[1], TContext::image_type); return false; } @@ -186,7 +186,7 @@ static bool read_max_val(TContext& context, Streamer& streamer) } if (context.max_val > 255) { - dbgln<debug_portable_image_loader>("We can't parse 2 byte color for {}", TContext::image_type); + dbgln<PORTABLE_IMAGE_LOADER_DEBUG>("We can't parse 2 byte color for {}", TContext::image_type); context.state = TContext::Error; return false; } diff --git a/Userland/Libraries/LibHTTP/Job.cpp b/Userland/Libraries/LibHTTP/Job.cpp index 0c69848dab..eb5164daa3 100644 --- a/Userland/Libraries/LibHTTP/Job.cpp +++ b/Userland/Libraries/LibHTTP/Job.cpp @@ -36,14 +36,14 @@ namespace HTTP { static ByteBuffer handle_content_encoding(const ByteBuffer& buf, const String& content_encoding) { - dbgln<debug_job>("Job::handle_content_encoding: buf has content_encoding={}", content_encoding); + dbgln<JOB_DEBUG>("Job::handle_content_encoding: buf has content_encoding={}", content_encoding); if (content_encoding == "gzip") { if (!Core::Gzip::is_compressed(buf)) { dbgln("Job::handle_content_encoding: buf is not gzip compressed!"); } - dbgln<debug_job>("Job::handle_content_encoding: buf is gzip compressed!"); + dbgln<JOB_DEBUG>("Job::handle_content_encoding: buf is gzip compressed!"); auto uncompressed = Core::Gzip::decompress(buf); if (!uncompressed.has_value()) { @@ -51,7 +51,7 @@ static ByteBuffer handle_content_encoding(const ByteBuffer& buf, const String& c return buf; } - if constexpr (debug_job) { + if constexpr (JOB_DEBUG) { dbgln("Job::handle_content_encoding: 
Gzip::decompress() successful."); dbgln(" Input size: {}", buf.size()); dbgln(" Output size: {}", uncompressed.value().size()); @@ -77,7 +77,7 @@ void Job::flush_received_buffers() { if (!m_can_stream_response || m_buffered_size == 0) return; - dbgln<debug_job>("Job: Flushing received buffers: have {} bytes in {} buffers", m_buffered_size, m_received_buffers.size()); + dbgln<JOB_DEBUG>("Job: Flushing received buffers: have {} bytes in {} buffers", m_buffered_size, m_received_buffers.size()); for (size_t i = 0; i < m_received_buffers.size(); ++i) { auto& payload = m_received_buffers[i]; auto written = do_write(payload); @@ -92,7 +92,7 @@ void Job::flush_received_buffers() payload = payload.slice(written, payload.size() - written); break; } - dbgln<debug_job>("Job: Flushing received buffers done: have {} bytes in {} buffers", m_buffered_size, m_received_buffers.size()); + dbgln<JOB_DEBUG>("Job: Flushing received buffers done: have {} bytes in {} buffers", m_buffered_size, m_received_buffers.size()); } void Job::on_socket_connected() @@ -103,7 +103,7 @@ void Job::on_socket_connected() m_sent_data = true; auto raw_request = m_request.to_raw_request(); - if constexpr (debug_job) { + if constexpr (JOB_DEBUG) { dbgln("Job: raw_request:"); dbgln("{}", String::copy(raw_request)); } @@ -198,10 +198,10 @@ void Job::on_socket_connected() m_headers.set(name, value); if (name.equals_ignoring_case("Content-Encoding")) { // Assume that any content-encoding means that we can't decode it as a stream :( - dbgln<debug_job>("Content-Encoding {} detected, cannot stream output :(", value); + dbgln<JOB_DEBUG>("Content-Encoding {} detected, cannot stream output :(", value); m_can_stream_response = false; } - dbgln<debug_job>("Job: [{}] = '{}'", name, value); + dbgln<JOB_DEBUG>("Job: [{}] = '{}'", name, value); return; } ASSERT(m_state == State::InBody); @@ -216,7 +216,7 @@ void Job::on_socket_connected() // read size auto size_data = read_line(PAGE_SIZE); auto size_lines = size_data.view().lines(); - dbgln<debug_job>("Job: Received a chunk with size '{}'", size_data); + dbgln<JOB_DEBUG>("Job: Received a chunk with size '{}'", size_data); if (size_lines.size() == 0) { dbgln("Job: Reached end of stream"); finish_up(); @@ -239,26 +239,26 @@ void Job::on_socket_connected() m_current_chunk_total_size = 0; m_current_chunk_remaining_size = 0; - dbgln<debug_job>("Job: Received the last chunk with extensions '{}'", size_string.substring_view(1, size_string.length() - 1)); + dbgln<JOB_DEBUG>("Job: Received the last chunk with extensions '{}'", size_string.substring_view(1, size_string.length() - 1)); } else { m_current_chunk_total_size = size; m_current_chunk_remaining_size = size; read_size = size; - dbgln<debug_job>("Job: Chunk of size '{}' started", size); + dbgln<JOB_DEBUG>("Job: Chunk of size '{}' started", size); } } } else { read_size = remaining; - dbgln<debug_job>("Job: Resuming chunk with '{}' bytes left over", remaining); + dbgln<JOB_DEBUG>("Job: Resuming chunk with '{}' bytes left over", remaining); } } else { auto transfer_encoding = m_headers.get("Transfer-Encoding"); if (transfer_encoding.has_value()) { auto encoding = transfer_encoding.value(); - dbgln<debug_job>("Job: This content has transfer encoding '{}'", encoding); + dbgln<JOB_DEBUG>("Job: This content has transfer encoding '{}'", encoding); if (encoding.equals_ignoring_case("chunked")) { m_current_chunk_remaining_size = -1; goto read_chunk_size; @@ -289,9 +289,9 @@ void Job::on_socket_connected() if (m_current_chunk_remaining_size.has_value()) { auto 
size = m_current_chunk_remaining_size.value() - payload.size(); - dbgln<debug_job>("Job: We have {} bytes left over in this chunk", size); + dbgln<JOB_DEBUG>("Job: We have {} bytes left over in this chunk", size); if (size == 0) { - dbgln<debug_job>("Job: Finished a chunk of {} bytes", m_current_chunk_total_size.value()); + dbgln<JOB_DEBUG>("Job: Finished a chunk of {} bytes", m_current_chunk_total_size.value()); if (m_current_chunk_total_size.value() == 0) { m_state = State::Trailers; @@ -302,7 +302,7 @@ void Job::on_socket_connected() size = -1; [[maybe_unused]] auto line = read_line(PAGE_SIZE); - if constexpr (debug_job) + if constexpr (JOB_DEBUG) dbgln("Line following (should be empty): '{}'", line); } m_current_chunk_remaining_size = size; diff --git a/Userland/Libraries/LibMarkdown/Table.cpp b/Userland/Libraries/LibMarkdown/Table.cpp index 81bd1c5360..6cf0ef2bd6 100644 --- a/Userland/Libraries/LibMarkdown/Table.cpp +++ b/Userland/Libraries/LibMarkdown/Table.cpp @@ -182,7 +182,7 @@ OwnPtr<Table> Table::parse(Vector<StringView>::ConstIterator& lines) size_t relative_width = delimiter.length(); for (auto ch : delimiter) { if (ch != '-') { - dbgln<debug_markdown>("Invalid character _{}_ in table heading delimiter (ignored)", ch); + dbgln<MARKDOWN_DEBUG>("Invalid character _{}_ in table heading delimiter (ignored)", ch); --relative_width; } } diff --git a/Userland/Libraries/LibRegex/RegexByteCode.cpp b/Userland/Libraries/LibRegex/RegexByteCode.cpp index 5f124b3468..b996c4b7ca 100644 --- a/Userland/Libraries/LibRegex/RegexByteCode.cpp +++ b/Userland/Libraries/LibRegex/RegexByteCode.cpp @@ -370,7 +370,7 @@ ALWAYS_INLINE ExecutionResult OpCode_SaveRightNamedCaptureGroup::execute(const M auto& map = output.named_capture_group_matches.at(input.match_index); - if constexpr (debug_regex) { + if constexpr (REGEX_DEBUG) { ASSERT(start_position + length <= input.view.length()); dbgln("Save named capture group with name={} and content='{}'", capture_group_name, input.view.substring_view(start_position, length)); } diff --git a/Userland/Libraries/LibRegex/RegexMatcher.cpp b/Userland/Libraries/LibRegex/RegexMatcher.cpp index cadc74fece..16a6662351 100644 --- a/Userland/Libraries/LibRegex/RegexMatcher.cpp +++ b/Userland/Libraries/LibRegex/RegexMatcher.cpp @@ -148,7 +148,7 @@ RegexResult Matcher<Parser>::match(const Vector<RegexStringView> views, Optional for (auto& view : views) { input.view = view; - dbgln<debug_regex>("[match] Starting match with view ({}): _{}_", view.length(), view); + dbgln<REGEX_DEBUG>("[match] Starting match with view ({}): _{}_", view.length(), view); auto view_length = view.length(); size_t view_index = m_pattern.start_offset; @@ -214,7 +214,7 @@ RegexResult Matcher<Parser>::match(const Vector<RegexStringView> views, Optional continue; } - if constexpr (debug_regex) { + if constexpr (REGEX_DEBUG) { dbgln("state.string_position={}, view_index={}", state.string_position, view_index); dbgln("[match] Found a match (length={}): '{}'", state.string_position - view_index, input.view.substring_view(view_index, state.string_position - view_index)); } diff --git a/Userland/Libraries/LibTLS/ClientHandshake.cpp b/Userland/Libraries/LibTLS/ClientHandshake.cpp index 99ebc72822..60498e7366 100644 --- a/Userland/Libraries/LibTLS/ClientHandshake.cpp +++ b/Userland/Libraries/LibTLS/ClientHandshake.cpp @@ -112,7 +112,7 @@ ssize_t TLSv12::handle_hello(ReadonlyBytes buffer, WritePacketStage& write_packe return (i8)Error::NoCommonCipher; } m_context.cipher = cipher; - dbgln<debug_tls>("Cipher: {}", 
(u16)cipher); + dbgln<TLS_DEBUG>("Cipher: {}", (u16)cipher); // The handshake hash function is _always_ SHA256 m_context.handshake_hash.initialize(Crypto::Hash::HashKind::SHA256); @@ -146,7 +146,7 @@ ssize_t TLSv12::handle_hello(ReadonlyBytes buffer, WritePacketStage& write_packe u16 extension_length = AK::convert_between_host_and_network_endian(*(const u16*)buffer.offset_pointer(res)); res += 2; - dbgln<debug_tls>("extension {} with length {}", (u16)extension_type, extension_length); + dbgln<TLS_DEBUG>("extension {} with length {}", (u16)extension_type, extension_length); if (extension_length) { if (buffer.size() - res < extension_length) { @@ -218,12 +218,12 @@ ssize_t TLSv12::handle_finished(ReadonlyBytes buffer, WritePacketStage& write_pa u32 size = buffer[0] * 0x10000 + buffer[1] * 0x100 + buffer[2]; if (size < 12) { - dbgln<debug_tls>("finished packet smaller than minimum size: {}", size); + dbgln<TLS_DEBUG>("finished packet smaller than minimum size: {}", size); return (i8)Error::BrokenPacket; } if (size < buffer.size() - index) { - dbgln<debug_tls>("not enough data after length: {} > {}", size, buffer.size() - index); + dbgln<TLS_DEBUG>("not enough data after length: {} > {}", size, buffer.size() - index); return (i8)Error::NeedMoreData; } @@ -324,7 +324,7 @@ ssize_t TLSv12::handle_payload(ReadonlyBytes vbuffer) auto type = buffer[0]; auto write_packets { WritePacketStage::Initial }; size_t payload_size = buffer[1] * 0x10000 + buffer[2] * 0x100 + buffer[3] + 3; - dbgln<debug_tls>("payload size: {} buffer length: {}", payload_size, buffer_length); + dbgln<TLS_DEBUG>("payload size: {} buffer length: {}", payload_size, buffer_length); if (payload_size + 1 > buffer_length) return (i8)Error::NeedMoreData; diff --git a/Userland/Libraries/LibTLS/Record.cpp b/Userland/Libraries/LibTLS/Record.cpp index 5cdb816fa9..be3e136681 100644 --- a/Userland/Libraries/LibTLS/Record.cpp +++ b/Userland/Libraries/LibTLS/Record.cpp @@ -39,12 +39,12 @@ void TLSv12::write_packet(ByteBuffer& packet) m_context.tls_buffer.append(packet.data(), packet.size()); if (m_context.connection_status > ConnectionStatus::Disconnected) { if (!m_has_scheduled_write_flush) { - dbgln<debug_tls>("Scheduling write of {}", m_context.tls_buffer.size()); + dbgln<TLS_DEBUG>("Scheduling write of {}", m_context.tls_buffer.size()); deferred_invoke([this](auto&) { write_into_socket(); }); m_has_scheduled_write_flush = true; } else { // multiple packet are available, let's flush some out - dbgln<debug_tls>("Flushing scheduled write of {}", m_context.tls_buffer.size()); + dbgln<TLS_DEBUG>("Flushing scheduled write of {}", m_context.tls_buffer.size()); write_into_socket(); // the deferred invoke is still in place m_has_scheduled_write_flush = true; @@ -216,7 +216,7 @@ ByteBuffer TLSv12::hmac_message(const ReadonlyBytes& buf, const Optional<Readonl auto digest = hmac.digest(); auto mac = ByteBuffer::copy(digest.immutable_data(), digest.data_length()); - if constexpr (debug_tls) { + if constexpr (TLS_DEBUG) { dbgln("HMAC of the block for sequence number {}", sequence_number); print_buffer(mac); } @@ -230,7 +230,7 @@ ssize_t TLSv12::handle_message(ReadonlyBytes buffer) size_t header_size = res; ssize_t payload_res = 0; - dbgln<debug_tls>("buffer size: {}", buffer.size()); + dbgln<TLS_DEBUG>("buffer size: {}", buffer.size()); if (buffer.size() < 5) { return (i8)Error::NeedMoreData; @@ -241,7 +241,7 @@ ssize_t TLSv12::handle_message(ReadonlyBytes buffer) // FIXME: Read the version and verify it - if constexpr (debug_tls) { + if constexpr 
(TLS_DEBUG) { auto version = (Version) * (const u16*)buffer.offset_pointer(buffer_position); dbgln("type={}, version={}", (u8)type, (u16)version); } @@ -249,21 +249,21 @@ ssize_t TLSv12::handle_message(ReadonlyBytes buffer) buffer_position += 2; auto length = AK::convert_between_host_and_network_endian(*(const u16*)buffer.offset_pointer(buffer_position)); - dbgln<debug_tls>("record length: {} at offset: {}", length, buffer_position); + dbgln<TLS_DEBUG>("record length: {} at offset: {}", length, buffer_position); buffer_position += 2; if (buffer_position + length > buffer.size()) { - dbgln<debug_tls>("record length more than what we have: {}", buffer.size()); + dbgln<TLS_DEBUG>("record length more than what we have: {}", buffer.size()); return (i8)Error::NeedMoreData; } - dbgln<debug_tls>("message type: {}, length: {}", (u8)type, length); + dbgln<TLS_DEBUG>("message type: {}, length: {}", (u8)type, length); auto plain = buffer.slice(buffer_position, buffer.size() - buffer_position); ByteBuffer decrypted; if (m_context.cipher_spec_set && type != MessageType::ChangeCipher) { - if constexpr (debug_tls) { + if constexpr (TLS_DEBUG) { dbgln("Encrypted: "); print_buffer(buffer.slice(header_size, length)); } @@ -389,7 +389,7 @@ ssize_t TLSv12::handle_message(ReadonlyBytes buffer) auto packet = build_alert(true, (u8)AlertDescription::UnexpectedMessage); write_packet(packet); } else { - dbgln<debug_tls>("application data message of size {}", plain.size()); + dbgln<TLS_DEBUG>("application data message of size {}", plain.size()); m_context.application_buffer.append(plain.data(), plain.size()); } @@ -414,9 +414,9 @@ ssize_t TLSv12::handle_message(ReadonlyBytes buffer) } break; case MessageType::Alert: - dbgln<debug_tls>("alert message of length {}", length); + dbgln<TLS_DEBUG>("alert message of length {}", length); if (length >= 2) { - if constexpr (debug_tls) + if constexpr (TLS_DEBUG) print_buffer(plain); auto level = plain[0]; diff --git a/Userland/Libraries/LibTLS/Socket.cpp b/Userland/Libraries/LibTLS/Socket.cpp index 93b58064de..a8757cfb1b 100644 --- a/Userland/Libraries/LibTLS/Socket.cpp +++ b/Userland/Libraries/LibTLS/Socket.cpp @@ -174,7 +174,7 @@ void TLSv12::read_from_socket() void TLSv12::write_into_socket() { - dbgln<debug_tls>("Flushing cached records: {} established? {}", m_context.tls_buffer.size(), is_established()); + dbgln<TLS_DEBUG>("Flushing cached records: {} established? 
{}", m_context.tls_buffer.size(), is_established()); m_has_scheduled_write_flush = false; if (!check_connection_state(false)) @@ -199,7 +199,7 @@ bool TLSv12::check_connection_state(bool read) m_context.connection_finished = true; } if (m_context.critical_error) { - dbgln<debug_tls>("CRITICAL ERROR {} :(", m_context.critical_error); + dbgln<TLS_DEBUG>("CRITICAL ERROR {} :(", m_context.critical_error); if (on_tls_error) on_tls_error((AlertDescription)m_context.critical_error); @@ -211,7 +211,7 @@ bool TLSv12::check_connection_state(bool read) on_tls_finished(); } if (m_context.tls_buffer.size()) { - dbgln<debug_tls>("connection closed without finishing data transfer, {} bytes still in buffer and {} bytes in application buffer", + dbgln<TLS_DEBUG>("connection closed without finishing data transfer, {} bytes still in buffer and {} bytes in application buffer", m_context.tls_buffer.size(), m_context.application_buffer.size()); } else { @@ -247,7 +247,7 @@ bool TLSv12::flush() } if (m_context.send_retries++ == 10) { // drop the records, we can't send - dbgln<debug_tls>("Dropping {} bytes worth of TLS records as max retries has been reached", write_buffer().size()); + dbgln<TLS_DEBUG>("Dropping {} bytes worth of TLS records as max retries has been reached", write_buffer().size()); write_buffer().clear(); m_context.send_retries = 0; } diff --git a/Userland/Libraries/LibTLS/TLSv12.cpp b/Userland/Libraries/LibTLS/TLSv12.cpp index 62534771dc..b94f6d47b0 100644 --- a/Userland/Libraries/LibTLS/TLSv12.cpp +++ b/Userland/Libraries/LibTLS/TLSv12.cpp @@ -406,7 +406,7 @@ static ssize_t _parse_asn1(const Context& context, Certificate& cert, const u8* hash.initialize(Crypto::Hash::HashKind::SHA512); break; default: - dbgln<debug_tls>("Unsupported hash mode {}", (u32)cert.key_algorithm); + dbgln<TLS_DEBUG>("Unsupported hash mode {}", (u32)cert.key_algorithm); // fallback to md5, it will fail later hash.initialize(Crypto::Hash::HashKind::MD5); break; @@ -436,7 +436,7 @@ Optional<Certificate> TLSv12::parse_asn1(ReadonlyBytes buffer, bool) const _parse_asn1(m_context, cert, buffer.data(), buffer.size(), 1, fields, nullptr, 0, nullptr, nullptr); - dbgln<debug_tls>("Certificate issued for {} by {}", cert.subject, cert.issuer_subject); + dbgln<TLS_DEBUG>("Certificate issued for {} by {}", cert.subject, cert.issuer_subject); return cert; } @@ -454,7 +454,7 @@ ssize_t TLSv12::handle_certificate(ReadonlyBytes buffer) u32 certificate_total_length = buffer[0] * 0x10000 + buffer[1] * 0x100 + buffer[2]; - dbgln<debug_tls>("total length: {}", certificate_total_length); + dbgln<TLS_DEBUG>("total length: {}", certificate_total_length); if (certificate_total_length <= 4) return 3 * certificate_total_length; @@ -549,7 +549,7 @@ void TLSv12::consume(ReadonlyBytes record) return; } - dbgln<debug_tls>("Consuming {} bytes", record.size()); + dbgln<TLS_DEBUG>("Consuming {} bytes", record.size()); m_context.message_buffer.append(record.data(), record.size()); @@ -559,17 +559,17 @@ void TLSv12::consume(ReadonlyBytes record) size_t size_offset { 3 }; // read the common record header size_t header_size { 5 }; - dbgln<debug_tls>("message buffer length {}", buffer_length); + dbgln<TLS_DEBUG>("message buffer length {}", buffer_length); while (buffer_length >= 5) { auto length = AK::convert_between_host_and_network_endian(*(u16*)m_context.message_buffer.offset_pointer(index + size_offset)) + header_size; if (length > buffer_length) { - dbgln<debug_tls>("Need more data: {} > {}", length, buffer_length); + dbgln<TLS_DEBUG>("Need more data: 
{} > {}", length, buffer_length); break; } auto consumed = handle_message(m_context.message_buffer.bytes().slice(index, length)); - if constexpr (debug_tls) { + if constexpr (TLS_DEBUG) { if (consumed > 0) dbgln("consumed {} bytes", consumed); else diff --git a/Userland/Libraries/LibWeb/CodeGenerators/WrapperGenerator.cpp b/Userland/Libraries/LibWeb/CodeGenerators/WrapperGenerator.cpp index f817c323cc..7588af66f0 100644 --- a/Userland/Libraries/LibWeb/CodeGenerators/WrapperGenerator.cpp +++ b/Userland/Libraries/LibWeb/CodeGenerators/WrapperGenerator.cpp @@ -406,7 +406,7 @@ int main(int argc, char** argv) interface->fully_qualified_name = interface->name; } - if constexpr (debug_wrapper_generator) { + if constexpr (WRAPPER_GENERATOR_DEBUG) { dbgln("Attributes:"); for (auto& attribute : interface->attributes) { dbgln(" {}{}{} {}", diff --git a/Userland/Libraries/LibWeb/HTML/Parser/HTMLDocumentParser.cpp b/Userland/Libraries/LibWeb/HTML/Parser/HTMLDocumentParser.cpp index 4c1a6555cf..fff01fba46 100644 --- a/Userland/Libraries/LibWeb/HTML/Parser/HTMLDocumentParser.cpp +++ b/Userland/Libraries/LibWeb/HTML/Parser/HTMLDocumentParser.cpp @@ -141,7 +141,7 @@ void HTMLDocumentParser::run(const URL& url) break; auto& token = optional_token.value(); - dbgln<debug_parser>("[{}] {}", insertion_mode_name(), token.to_string()); + dbgln<PARSER_DEBUG>("[{}] {}", insertion_mode_name(), token.to_string()); // FIXME: If the adjusted current node is a MathML text integration point and the token is a start tag whose tag name is neither "mglyph" nor "malignmark" // FIXME: If the adjusted current node is a MathML text integration point and the token is a character token @@ -157,7 +157,7 @@ void HTMLDocumentParser::run(const URL& url) } if (m_stop_parsing) { - dbgln<debug_parser>("Stop parsing{}! :^)", m_parsing_fragment ? " fragment" : ""); + dbgln<PARSER_DEBUG>("Stop parsing{}! :^)", m_parsing_fragment ? 
" fragment" : ""); break; } } diff --git a/Userland/Libraries/LibWeb/HTML/Parser/HTMLTokenizer.cpp b/Userland/Libraries/LibWeb/HTML/Parser/HTMLTokenizer.cpp index 2ea70af30c..c0d3bf6a67 100644 --- a/Userland/Libraries/LibWeb/HTML/Parser/HTMLTokenizer.cpp +++ b/Userland/Libraries/LibWeb/HTML/Parser/HTMLTokenizer.cpp @@ -36,10 +36,10 @@ namespace Web::HTML { #pragma GCC diagnostic ignored "-Wunused-label" -#if TOKENIZER_TRACE -# define PARSE_ERROR() \ - do { \ - dbgln("Parse error (tokenization) {} @ {}", __PRETTY_FUNCTION__, __LINE__) \ +#if TOKENIZER_TRACE_DEBUG +# define PARSE_ERROR() \ + do { \ + dbgln("Parse error (tokenization) {} @ {}", __PRETTY_FUNCTION__, __LINE__); \ } while (0) #else # define PARSE_ERROR() @@ -221,7 +221,7 @@ Optional<u32> HTMLTokenizer::next_code_point() return {}; m_prev_utf8_iterator = m_utf8_iterator; ++m_utf8_iterator; - dbgln<debug_trace_tokenizer>("(Tokenizer) Next code_point: {}", (char)*m_prev_utf8_iterator); + dbgln<TOKENIZER_TRACE_DEBUG>("(Tokenizer) Next code_point: {}", (char)*m_prev_utf8_iterator); return *m_prev_utf8_iterator; } @@ -2618,17 +2618,17 @@ HTMLTokenizer::HTMLTokenizer(const StringView& input, const String& encoding) void HTMLTokenizer::will_switch_to([[maybe_unused]] State new_state) { - dbgln<debug_trace_tokenizer>("[{}] Switch to {}", state_name(m_state), state_name(new_state)); + dbgln<TOKENIZER_TRACE_DEBUG>("[{}] Switch to {}", state_name(m_state), state_name(new_state)); } void HTMLTokenizer::will_reconsume_in([[maybe_unused]] State new_state) { - dbgln<debug_trace_tokenizer>("[{}] Reconsume in {}", state_name(m_state), state_name(new_state)); + dbgln<TOKENIZER_TRACE_DEBUG>("[{}] Reconsume in {}", state_name(m_state), state_name(new_state)); } void HTMLTokenizer::switch_to(Badge<HTMLDocumentParser>, State new_state) { - dbgln<debug_trace_tokenizer>("[{}] Parser switches tokenizer state to {}", state_name(m_state), state_name(new_state)); + dbgln<TOKENIZER_TRACE_DEBUG>("[{}] Parser switches tokenizer state to {}", state_name(m_state), state_name(new_state)); m_state = new_state; } diff --git a/Userland/Libraries/LibWeb/Loader/ImageLoader.cpp b/Userland/Libraries/LibWeb/Loader/ImageLoader.cpp index ad4901587e..72b3bb3eb6 100644 --- a/Userland/Libraries/LibWeb/Loader/ImageLoader.cpp +++ b/Userland/Libraries/LibWeb/Loader/ImageLoader.cpp @@ -72,7 +72,7 @@ void ImageLoader::resource_did_load() m_loading_state = LoadingState::Loaded; - if constexpr (debug_image_loader) { + if constexpr (IMAGE_LOADER_DEBUG) { if (!resource()->has_encoded_data()) { dbgln("ImageLoader: Resource did load, no encoded data. URL: {}", resource()->url()); } else { diff --git a/Userland/Libraries/LibWeb/Loader/Resource.cpp b/Userland/Libraries/LibWeb/Loader/Resource.cpp index 666654a05f..c7ef689235 100644 --- a/Userland/Libraries/LibWeb/Loader/Resource.cpp +++ b/Userland/Libraries/LibWeb/Loader/Resource.cpp @@ -100,7 +100,7 @@ void Resource::did_load(Badge<ResourceLoader>, ReadonlyBytes data, const HashMap m_encoding = encoding_from_content_type(content_type.value()); m_mime_type = mime_type_from_content_type(content_type.value()); } else if (url().protocol() == "data" && !url().data_mime_type().is_empty()) { - dbgln<debug_resource>("This is a data URL with mime-type _{}_", url().data_mime_type()); + dbgln<RESOURCE_DEBUG>("This is a data URL with mime-type _{}_", url().data_mime_type()); m_encoding = "utf-8"; // FIXME: This doesn't seem nice. 
m_mime_type = url().data_mime_type(); } else { diff --git a/Userland/Libraries/LibWeb/Loader/ResourceLoader.cpp b/Userland/Libraries/LibWeb/Loader/ResourceLoader.cpp index ab39743fd8..41d0adf8f4 100644 --- a/Userland/Libraries/LibWeb/Loader/ResourceLoader.cpp +++ b/Userland/Libraries/LibWeb/Loader/ResourceLoader.cpp @@ -86,7 +86,7 @@ RefPtr<Resource> ResourceLoader::load_resource(Resource::Type type, const LoadRe if (it->value->type() != type) { dbgln("FIXME: Not using cached resource for {} since there's a type mismatch.", request.url()); } else { - dbgln<debug_cache>("Reusing cached resource for: {}", request.url()); + dbgln<CACHE_DEBUG>("Reusing cached resource for: {}", request.url()); return it->value; } } diff --git a/Userland/Libraries/LibWeb/SVG/SVGPathElement.cpp b/Userland/Libraries/LibWeb/SVG/SVGPathElement.cpp index a11ac8c7fd..35657ece06 100644 --- a/Userland/Libraries/LibWeb/SVG/SVGPathElement.cpp +++ b/Userland/Libraries/LibWeb/SVG/SVGPathElement.cpp @@ -38,7 +38,7 @@ namespace Web::SVG { static void print_instruction(const PathInstruction& instruction) { - ASSERT(debug_path); + ASSERT(PATH_DEBUG); auto& data = instruction.data; @@ -463,7 +463,7 @@ Gfx::Path& SVGPathElement::get_path() auto& absolute = instruction.absolute; auto& data = instruction.data; - if constexpr (debug_path) { + if constexpr (PATH_DEBUG) { print_instruction(instruction); } diff --git a/Userland/Libraries/LibWeb/WebContentClient.cpp b/Userland/Libraries/LibWeb/WebContentClient.cpp index 22f818f5e2..dd40e426be 100644 --- a/Userland/Libraries/LibWeb/WebContentClient.cpp +++ b/Userland/Libraries/LibWeb/WebContentClient.cpp @@ -56,7 +56,7 @@ void WebContentClient::handle([[maybe_unused]] const Messages::WebContentClient: void WebContentClient::handle(const Messages::WebContentClient::DidInvalidateContentRect& message) { - dbgln<debug_spam>("handle: WebContentClient::DidInvalidateContentRect! content_rect={}", message.content_rect()); + dbgln<SPAM_DEBUG>("handle: WebContentClient::DidInvalidateContentRect! content_rect={}", message.content_rect()); // FIXME: Figure out a way to coalesce these messages to reduce unnecessary painting m_view.notify_server_did_invalidate_content_rect({}, message.content_rect()); @@ -72,25 +72,25 @@ void WebContentClient::handle(const Messages::WebContentClient::DidChangeSelecti void WebContentClient::handle(const Messages::WebContentClient::DidLayout& message) { - dbgln<debug_spam>("handle: WebContentClient::DidLayout! content_size={}", message.content_size()); + dbgln<SPAM_DEBUG>("handle: WebContentClient::DidLayout! content_size={}", message.content_size()); m_view.notify_server_did_layout({}, message.content_size()); } void WebContentClient::handle(const Messages::WebContentClient::DidChangeTitle& message) { - dbgln<debug_spam>("handle: WebContentClient::DidChangeTitle! title={}", message.title()); + dbgln<SPAM_DEBUG>("handle: WebContentClient::DidChangeTitle! title={}", message.title()); m_view.notify_server_did_change_title({}, message.title()); } void WebContentClient::handle(const Messages::WebContentClient::DidRequestScrollIntoView& message) { - dbgln<debug_spam>("handle: WebContentClient::DidRequestScrollIntoView! rect={}", message.rect()); + dbgln<SPAM_DEBUG>("handle: WebContentClient::DidRequestScrollIntoView! rect={}", message.rect()); m_view.notify_server_did_request_scroll_into_view({}, message.rect()); } void WebContentClient::handle(const Messages::WebContentClient::DidHoverLink& message) { - dbgln<debug_spam>("handle: WebContentClient::DidHoverLink! 
url={}", message.url()); + dbgln<SPAM_DEBUG>("handle: WebContentClient::DidHoverLink! url={}", message.url()); m_view.notify_server_did_hover_link({}, message.url()); } |