From 8465683dcf1ede4dbab46915113dd2ce4e4b7dfb Mon Sep 17 00:00:00 2001
From: asynts
Date: Sat, 23 Jan 2021 23:59:27 +0100
Subject: Everywhere: Debug macros instead of constexpr.

This was done with the following script:

    find . \( -name '*.cpp' -o -name '*.h' -o -name '*.in' \) -not -path './Toolchain/*' -not -path './Build/*' -exec sed -i -E 's/dbgln<debug_([a-z_]+)>/dbgln<\U\1_DEBUG>/' {} \;

    find . \( -name '*.cpp' -o -name '*.h' -o -name '*.in' \) -not -path './Toolchain/*' -not -path './Build/*' -exec sed -i -E 's/if constexpr \(debug_([a-z0-9_]+)/if constexpr \(\U\1_DEBUG/' {} \;
---
 Userland/Libraries/LibHTTP/Job.cpp | 32 ++++++++++++++++----------------
 1 file changed, 16 insertions(+), 16 deletions(-)

(limited to 'Userland/Libraries/LibHTTP/Job.cpp')

diff --git a/Userland/Libraries/LibHTTP/Job.cpp b/Userland/Libraries/LibHTTP/Job.cpp
index 0c69848dab..eb5164daa3 100644
--- a/Userland/Libraries/LibHTTP/Job.cpp
+++ b/Userland/Libraries/LibHTTP/Job.cpp
@@ -36,14 +36,14 @@ namespace HTTP {
 
 static ByteBuffer handle_content_encoding(const ByteBuffer& buf, const String& content_encoding)
 {
-    dbgln<debug_job>("Job::handle_content_encoding: buf has content_encoding={}", content_encoding);
+    dbgln<JOB_DEBUG>("Job::handle_content_encoding: buf has content_encoding={}", content_encoding);
 
     if (content_encoding == "gzip") {
         if (!Core::Gzip::is_compressed(buf)) {
             dbgln("Job::handle_content_encoding: buf is not gzip compressed!");
         }
 
-        dbgln<debug_job>("Job::handle_content_encoding: buf is gzip compressed!");
+        dbgln<JOB_DEBUG>("Job::handle_content_encoding: buf is gzip compressed!");
 
         auto uncompressed = Core::Gzip::decompress(buf);
         if (!uncompressed.has_value()) {
@@ -51,7 +51,7 @@ static ByteBuffer handle_content_encoding(const ByteBuffer& buf, const String& c
             return buf;
         }
 
-        if constexpr (debug_job) {
+        if constexpr (JOB_DEBUG) {
             dbgln("Job::handle_content_encoding: Gzip::decompress() successful.");
             dbgln("  Input size: {}", buf.size());
             dbgln("  Output size: {}", uncompressed.value().size());
@@ -77,7 +77,7 @@ void Job::flush_received_buffers()
 {
     if (!m_can_stream_response || m_buffered_size == 0)
         return;
-    dbgln<debug_job>("Job: Flushing received buffers: have {} bytes in {} buffers", m_buffered_size, m_received_buffers.size());
+    dbgln<JOB_DEBUG>("Job: Flushing received buffers: have {} bytes in {} buffers", m_buffered_size, m_received_buffers.size());
     for (size_t i = 0; i < m_received_buffers.size(); ++i) {
         auto& payload = m_received_buffers[i];
         auto written = do_write(payload);
@@ -92,7 +92,7 @@ void Job::flush_received_buffers()
         payload = payload.slice(written, payload.size() - written);
         break;
     }
-    dbgln<debug_job>("Job: Flushing received buffers done: have {} bytes in {} buffers", m_buffered_size, m_received_buffers.size());
+    dbgln<JOB_DEBUG>("Job: Flushing received buffers done: have {} bytes in {} buffers", m_buffered_size, m_received_buffers.size());
 }
 
 void Job::on_socket_connected()
@@ -103,7 +103,7 @@ void Job::on_socket_connected()
         m_sent_data = true;
         auto raw_request = m_request.to_raw_request();
 
-        if constexpr (debug_job) {
+        if constexpr (JOB_DEBUG) {
             dbgln("Job: raw_request:");
             dbgln("{}", String::copy(raw_request));
         }
@@ -198,10 +198,10 @@ void Job::on_socket_connected()
             m_headers.set(name, value);
             if (name.equals_ignoring_case("Content-Encoding")) {
                 // Assume that any content-encoding means that we can't decode it as a stream :(
-                dbgln<debug_job>("Content-Encoding {} detected, cannot stream output :(", value);
+                dbgln<JOB_DEBUG>("Content-Encoding {} detected, cannot stream output :(", value);
                 m_can_stream_response = false;
             }
-            dbgln<debug_job>("Job: [{}] = '{}'", name, value);
+            dbgln<JOB_DEBUG>("Job: [{}] = '{}'", name, value);
             return;
         }
         ASSERT(m_state == State::InBody);
@@ -216,7 +216,7 @@ void Job::on_socket_connected()
                     // read size
                     auto size_data = read_line(PAGE_SIZE);
                     auto size_lines = size_data.view().lines();
-                    dbgln<debug_job>("Job: Received a chunk with size '{}'", size_data);
+                    dbgln<JOB_DEBUG>("Job: Received a chunk with size '{}'", size_data);
                     if (size_lines.size() == 0) {
                         dbgln("Job: Reached end of stream");
                         finish_up();
@@ -239,26 +239,26 @@ void Job::on_socket_connected()
                             m_current_chunk_total_size = 0;
                             m_current_chunk_remaining_size = 0;
 
-                            dbgln<debug_job>("Job: Received the last chunk with extensions '{}'", size_string.substring_view(1, size_string.length() - 1));
+                            dbgln<JOB_DEBUG>("Job: Received the last chunk with extensions '{}'", size_string.substring_view(1, size_string.length() - 1));
                         } else {
                             m_current_chunk_total_size = size;
                             m_current_chunk_remaining_size = size;
                             read_size = size;
 
-                            dbgln<debug_job>("Job: Chunk of size '{}' started", size);
+                            dbgln<JOB_DEBUG>("Job: Chunk of size '{}' started", size);
                         }
                     }
                 } else {
                     read_size = remaining;
 
-                    dbgln<debug_job>("Job: Resuming chunk with '{}' bytes left over", remaining);
+                    dbgln<JOB_DEBUG>("Job: Resuming chunk with '{}' bytes left over", remaining);
                 }
             } else {
                 auto transfer_encoding = m_headers.get("Transfer-Encoding");
                 if (transfer_encoding.has_value()) {
                     auto encoding = transfer_encoding.value();
-                    dbgln<debug_job>("Job: This content has transfer encoding '{}'", encoding);
+                    dbgln<JOB_DEBUG>("Job: This content has transfer encoding '{}'", encoding);
                     if (encoding.equals_ignoring_case("chunked")) {
                         m_current_chunk_remaining_size = -1;
                         goto read_chunk_size;
@@ -289,9 +289,9 @@ void Job::on_socket_connected()
         if (m_current_chunk_remaining_size.has_value()) {
             auto size = m_current_chunk_remaining_size.value() - payload.size();
 
-            dbgln<debug_job>("Job: We have {} bytes left over in this chunk", size);
+            dbgln<JOB_DEBUG>("Job: We have {} bytes left over in this chunk", size);
             if (size == 0) {
-                dbgln<debug_job>("Job: Finished a chunk of {} bytes", m_current_chunk_total_size.value());
+                dbgln<JOB_DEBUG>("Job: Finished a chunk of {} bytes", m_current_chunk_total_size.value());
 
                 if (m_current_chunk_total_size.value() == 0) {
                     m_state = State::Trailers;
@@ -302,7 +302,7 @@ void Job::on_socket_connected()
                     size = -1;
                     [[maybe_unused]] auto line = read_line(PAGE_SIZE);
 
-                    if constexpr (debug_job)
+                    if constexpr (JOB_DEBUG)
                         dbgln("Line following (should be empty): '{}'", line);
                 }
                 m_current_chunk_remaining_size = size;
--
cgit v1.2.3
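For context, the pattern the script converts to looks roughly like the sketch below. This is a minimal, self-contained illustration, not the actual AK/Debug.h: in SerenityOS the per-component macros come from the build configuration, whereas here JOB_DEBUG simply defaults to 0, and the printf-based dbgln is a stand-in for AK's {}-style formatter.

    // Minimal sketch (assumptions, not the real AK implementation): a debug macro
    // that is always defined to 0 or 1 can be used both as a template argument
    // and inside `if constexpr`, so the message is compiled out when disabled.
    #include <cstdio>

    #ifndef JOB_DEBUG
    #    define JOB_DEBUG 0 // hypothetical default; the real value comes from the build system
    #endif

    // Stand-in for AK's dbgln<bool>(): the discarded branch costs nothing at runtime.
    template<bool condition = true, typename... Args>
    void dbgln(const char* fmt, Args... args)
    {
        if constexpr (condition)
            std::printf(fmt, args...);
    }

    int main()
    {
        // After this patch, call sites pass the macro instead of a `constexpr bool debug_job`:
        dbgln<JOB_DEBUG>("Job: Flushing received buffers: have %d bytes in %d buffers\n", 1024, 2);

        // Larger debug-only blocks keep using `if constexpr`, now with the macro:
        if constexpr (JOB_DEBUG) {
            dbgln("Job: raw_request:\n");
        }
    }

With this sketch, building with -DJOB_DEBUG=1 enables both messages; leaving it unset keeps the calls but discards their bodies at compile time.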