using Invercargill;
using Invercargill.DataStructures;
using Inversion;

namespace Astralis {

    /**
     * Base pipeline component that compresses HTTP response bodies.
     *
     * Concrete subclasses implement a single encoding (e.g. gzip) by providing
     * {@link encoding_token}, {@link compress_buffer} and {@link compress_chunked}.
     * Responses with a known Content-Length at or below {@link max_buffer_size}
     * are buffered and compressed in one pass; larger or unknown-length
     * responses are streamed as a chunked compressed body.
     */
    public abstract class Compressor : Object, PipelineComponent {

        // Threshold for whole-body buffering; responses above it are streamed.
        protected uint64 max_buffer_size = 1024 * 1024 * 16;

        protected Compressor (uint64 max_buffer_size = 1024 * 1024 * 16) {
            this.max_buffer_size = max_buffer_size;
        }

        // The encoding token that identifies this compression format (e.g., "gzip")
        public abstract string encoding_token { get; }

        public async HttpResult process_request (HttpContext http_context, PipelineContext pipeline_context) throws Error {
            var result = yield pipeline_context.next ();

            // Check existing encoding on the http RESULT (to avoid double encoding).
            // BUGFIX: this previously read `Content-Encoding` from the *request*
            // headers, which describes the request body, not the response — an
            // already-encoded downstream response could be re-compressed.
            string? existing_encoding = null;
            if (result.headers.has ("Content-Encoding")) {
                existing_encoding = result.headers["Content-Encoding"];
            }

            // Do not compress if: DO_NOT_COMPRESS flag set, or `Content-Encoding`
            // is set and is not "identity"
            if (result.flag_is_set (HttpResultFlag.DO_NOT_COMPRESS)
                || (existing_encoding != null && existing_encoding != "identity")) {
                return result;
            }

            // Check if the client accepts this encoding.
            // NOTE(review): plain substring match — ignores q-values (e.g.
            // "gzip;q=0" still matches) and can false-positive on overlapping
            // tokens; confirm whether full Accept-Encoding parsing is required.
            var accept_encoding = http_context.request.headers.get_any_or_default ("Accept-Encoding");
            if (accept_encoding == null || !accept_encoding.contains (encoding_token)) {
                return result;
            }

            // Don't even bother compressing if the length is less than 10 bytes
            // (length of a GZip header)
            if (result.content_length != null && result.content_length <= 10) {
                return result;
            }

            // Case 1: Content length known and within threshold -> buffer and compress
            if (result.content_length != null && result.content_length <= max_buffer_size) {
                var buffered_result = yield buffer_and_compress (result);
                if (buffered_result != null) {
                    buffered_result.set_header ("Content-Encoding", encoding_token);
                    buffered_result.set_header ("Vary", "Accept-Encoding");
                    return buffered_result;
                }
                // Compression did not reduce the size — send the original as-is.
                return result;
            }
            // Case 2: Content length known and above threshold, and `DO_NOT_CHUNK`
            // is set -> do nothing
            else if (result.content_length != null && result.flag_is_set (HttpResultFlag.DO_NOT_CHUNK)) {
                return result;
            }

            // Case 3: Content length above threshold or is unknown -> send chunked
            // response (no Content-Length header)
            var streaming_result = compress_chunked (result);
            streaming_result.set_header ("Content-Encoding", encoding_token);
            streaming_result.set_header ("Vary", "Accept-Encoding");
            return streaming_result;
        }

        /// Compress a ByteBuffer of data, returning null if compression doesn't reduce size
        /// @param data The data to compress
        /// @param content_type Optional content type hint for compression optimization
        /// @return Compressed data as ByteBuffer, or null if compression doesn't reduce size
        public abstract ByteBuffer compress_buffer (ByteBuffer data, string? content_type) throws Error;

        /// Create a streaming compression result for the given inner result, or null if not compressible
        public abstract HttpResult compress_chunked (HttpResult inner_result) throws Error;

        /// Buffer and compress the result, returning null if compression doesn't reduce size
        private async HttpResult? buffer_and_compress (HttpResult inner_result) throws Error {
            // Buffer the entire inner response body in memory.
            var input_buffer = new BufferAsyncOutput ();
            yield inner_result.send_body (input_buffer);
            var input_data = input_buffer.get_buffer ();

            // Content type may let the compressor tune its strategy.
            string? content_type = null;
            if (inner_result.headers.has ("Content-Type")) {
                content_type = inner_result.headers["Content-Type"];
            }

            ByteBuffer? compressed_data = compress_buffer (input_data, content_type);
            if (compressed_data == null) {
                return null;
            }

            // Not worth it if compression failed to shrink the payload.
            if (compressed_data.length >= input_data.length) {
                return null;
            }

            // Wrap the compressed bytes, carrying over the original headers except
            // those the caller must recompute (length and encoding).
            var http_result = new HttpDataResult (compressed_data, inner_result.status);
            copy_headers (inner_result, http_result, { "content-length", "content-encoding" });
            return http_result;
        }

        /// Copy all headers from source to dest, skipping any whose lower-cased
        /// name appears in skip_headers (entries must already be lower-case).
        /// No-op when source is null.
        protected static void copy_headers (HttpResult? source, HttpResult dest, string[] skip_headers) {
            if (source == null) {
                return;
            }
            foreach (var header in source.headers) {
                var lower_key = header.key.down ();
                bool skip = false;
                foreach (var skip_key in skip_headers) {
                    if (lower_key == skip_key) {
                        skip = true;
                        break;
                    }
                }
                if (!skip) {
                    dest.set_header (header.key, header.value);
                }
            }
        }
    }
}