using Astralis;
using Invercargill;
using Invercargill.DataStructures;

namespace Spry.Tools {

    /**
     * Command-line tool that packages a file as a Spry Static Resource:
     * either a binary ".ssr" container or a generated Vala source file that
     * embeds the payload as const arrays.
     *
     * The payload is stored pre-compressed with gzip, zstd and brotli at their
     * highest levels (plus the uncompressed "identity" form) so a server can
     * answer Accept-Encoding negotiation without compressing per request.
     */
    public class Mkssr : Object {

        // Option state filled in by OptionContext.parse().
        private static bool show_version = false;
        private static string? output_file = null;
        private static string? content_type_override = null;
        private static string? resource_name_override = null;
        private static bool generate_vala = false;
        private static string? namespace_name = null;

        private const OptionEntry[] options = {
            { "output", 'o', 0, OptionArg.FILENAME, ref output_file, "Output file name (default: input.ssr or ClassNameResource.vala)", "FILE" },
            { "content-type", 'c', 0, OptionArg.STRING, ref content_type_override, "Override content type (e.g., text/html)", "TYPE" },
            { "name", 'n', 0, OptionArg.STRING, ref resource_name_override, "Override resource name (default: input filename)", "NAME" },
            { "vala", '\0', 0, OptionArg.NONE, ref generate_vala, "Generate Vala source file instead of SSR", null },
            { "ns", '\0', 0, OptionArg.STRING, ref namespace_name, "Namespace for generated Vala class (requires --vala)", "NAMESPACE" },
            { "version", 'v', 0, OptionArg.NONE, ref show_version, "Show version information", null },
            { null }
        };

        /**
         * Entry point: parses options, validates them and dispatches to
         * either SSR or Vala source generation.
         *
         * @return 0 on success, 1 on any error.
         */
        public static int main(string[] args) {
            try {
                var opt_context = new OptionContext("INPUT_FILE - Generate a Spry Static Resource file");
                opt_context.set_help_enabled(true);
                opt_context.add_main_entries(options, null);
                opt_context.parse(ref args);
            } catch (OptionError e) {
                stderr.printf("Error: %s\n", e.message);
                stderr.printf("Run '%s --help' for more information.\n", args[0]);
                return 1;
            }

            if (show_version) {
                stdout.printf("spry-mkssr 0.1\n");
                return 0;
            }

            // --ns only makes sense when generating a Vala source file.
            if (namespace_name != null && !generate_vala) {
                stderr.printf("Error: --ns requires --vala flag.\n");
                return 1;
            }

            // The input file is the first non-option argument left after parsing.
            string? input_file = null;
            if (args.length > 1) {
                input_file = args[1];
            }
            if (input_file == null) {
                stderr.printf("Error: No input file specified.\n");
                stderr.printf("Run '%s --help' for more information.\n", args[0]);
                return 1;
            }

            // Determine the output file name and (for --vala) the class name base.
            string actual_output;
            string? class_name_base = null;
            if (output_file != null) {
                actual_output = output_file;
            } else if (generate_vala) {
                // Derive the Vala filename from the resource name (-n) or the input file.
                class_name_base = resource_name_override ?? Path.get_basename(input_file);
                actual_output = make_pascal_case(class_name_base) + "Resource.vala";
            } else {
                actual_output = input_file + ".ssr";
            }

            try {
                var tool = new Mkssr();
                if (generate_vala) {
                    tool.generate_vala_source(input_file, actual_output, content_type_override, resource_name_override, namespace_name, class_name_base);
                } else {
                    tool.generate_ssr(input_file, actual_output, content_type_override, resource_name_override);
                }
                stdout.printf("Generated: %s\n", actual_output);
                return 0;
            } catch (Error e) {
                stderr.printf("Error: %s\n", e.message);
                return 1;
            }
        }

        /**
         * Generates a binary SSR container from input_path.
         *
         * @param input_path file whose contents become the resource payload
         * @param output_path path of the .ssr file to write
         * @param content_type_override MIME type to use instead of guessing
         * @param resource_name_override resource name instead of the input basename
         * @throws Error on I/O or compression failure
         */
        public void generate_ssr(string input_path, string output_path, string? content_type_override, string? resource_name_override) throws Error {
            size_t file_size;
            var input_data = read_input_file(input_path, out file_size);

            var name = resource_name_override ?? Path.get_basename(input_path);
            var content_type = content_type_override ?? guess_content_type(name, input_data);

            // Raw SHA-512 digest (64 bytes) used as the resource ETag.
            var hash = compute_hash(input_data);

            var encodings = build_encodings((owned) input_data, file_size);

            write_ssr_file(output_path, name, content_type, hash, encodings);
        }

        /**
         * Generates a Vala source file defining a ConstantStaticResource
         * subclass that embeds the payload.
         *
         * @param class_name_base basis for the class name (from -n); when null
         *        the class name is derived from output_path instead
         * @throws Error on I/O or compression failure
         */
        public void generate_vala_source(string input_path, string output_path, string? content_type_override, string? resource_name_override, string? namespace_name, string? class_name_base) throws Error {
            size_t file_size;
            var input_data = read_input_file(input_path, out file_size);

            var name = resource_name_override ?? Path.get_basename(input_path);
            var content_type = content_type_override ?? guess_content_type(name, input_data);

            // SHA-512 digest as a hex string for the generated file_hash property.
            var hash_hex = compute_hash_hex(input_data);

            // Class name: from -n (class_name_base) if given, otherwise from the output path.
            string class_name;
            if (class_name_base != null) {
                class_name = make_pascal_case(class_name_base) + "Resource";
            } else {
                class_name = Path.get_basename(output_path);
                if (class_name.has_suffix(".vala")) {
                    class_name = class_name.substring(0, class_name.length - 5);
                }
            }

            var encodings = build_encodings((owned) input_data, file_size);

            write_vala_file(output_path, class_name, name, content_type, hash_hex, encodings, namespace_name);
        }

        /**
         * Reads the whole input file into a freshly allocated byte array.
         *
         * @param file_size receives the file size reported by the filesystem
         * @throws IOError.NOT_FOUND when the file does not exist
         */
        private uint8[] read_input_file(string input_path, out size_t file_size) throws Error {
            var input_file = File.new_for_path(input_path);
            if (!input_file.query_exists()) {
                throw new IOError.NOT_FOUND(@"Input file '$input_path' does not exist");
            }

            var file_info = input_file.query_info("standard::size", 0);
            file_size = (size_t) file_info.get_size();

            var input_stream = new DataInputStream(input_file.read());
            var input_bytes = input_stream.read_bytes(file_size);
            input_stream.close();

            // Copy out of the immutable Bytes so the caller owns a plain array.
            var input_data = new uint8[input_bytes.get_size()];
            Memory.copy(input_data, input_bytes.get_data(), input_bytes.get_size());
            return input_data;
        }

        /**
         * Compresses input_data with every supported encoding at its highest
         * level, reporting progress on stdout. A compressed form is kept only
         * when it is smaller than the original; "identity" is always included.
         * Consumes input_data.
         */
        private List<EncodedData> build_encodings(owned uint8[] input_data, size_t file_size) throws Error {
            var encodings = new List<EncodedData>();

            // Identity (no compression) is always included as the fallback encoding.
            var identity_data = new uint8[input_data.length];
            Memory.copy(identity_data, input_data, input_data.length);
            encodings.append(new EncodedData("identity", (owned) identity_data));

            var input_buffer = new ByteBuffer.from_byte_array((owned) input_data);

            // GZip at highest compression.
            stdout.printf("Compressing with gzip...\n");
            var gzip_compressor = new GzipCompressor(9);
            var gzip_compressed = gzip_compressor.compress_buffer(input_buffer, null);
            var gzip_data = gzip_compressed.to_array();
            stdout.printf(" gzip: %zu -> %zu bytes (%.1f%%)\n", file_size, gzip_compressed.length, 100.0 * gzip_compressed.length / file_size);
            if (gzip_compressed.length < file_size) {
                encodings.append(new EncodedData("gzip", (owned) gzip_data));
            } else {
                stdout.printf(" Skipping gzip (not smaller than original)\n");
            }

            // Zstandard at highest compression.
            stdout.printf("Compressing with zstd...\n");
            var zstd_compressor = new ZstdCompressor(19);
            var zstd_compressed = zstd_compressor.compress_buffer(input_buffer, null);
            var zstd_data = zstd_compressed.to_array();
            stdout.printf(" zstd: %zu -> %zu bytes (%.1f%%)\n", file_size, zstd_compressed.length, 100.0 * zstd_compressed.length / file_size);
            if (zstd_compressed.length < file_size) {
                encodings.append(new EncodedData("zstd", (owned) zstd_data));
            } else {
                stdout.printf(" Skipping zstd (not smaller than original)\n");
            }

            // Brotli at highest compression; the HTTP token is "br".
            stdout.printf("Compressing with brotli...\n");
            var brotli_compressor = new BrotliCompressor(11);
            var brotli_compressed = brotli_compressor.compress_buffer(input_buffer, null);
            var brotli_data = brotli_compressed.to_array();
            stdout.printf(" brotli: %zu -> %zu bytes (%.1f%%)\n", file_size, brotli_compressed.length, 100.0 * brotli_compressed.length / file_size);
            if (brotli_compressed.length < file_size) {
                encodings.append(new EncodedData("br", (owned) brotli_data));
            } else {
                stdout.printf(" Skipping brotli (not smaller than original)\n");
            }

            return encodings;
        }

        /**
         * Guesses a MIME type from the filename and a data sample,
         * falling back to application/octet-stream.
         */
        private string guess_content_type(string filename, uint8[] sample_data) {
            bool result_uncertain;
            var content_type = ContentType.guess(filename, sample_data, out result_uncertain);
            var mime = ContentType.get_mime_type(content_type);
            return mime ?? "application/octet-stream";
        }

        /**
         * Computes the SHA-512 digest of data as raw bytes (64 bytes).
         */
        private uint8[] compute_hash(uint8[] data) {
            var checksum = new Checksum(ChecksumType.SHA512);
            checksum.update(data, data.length);
            // get_digest yields the raw digest directly, avoiding the previous
            // hex-string round-trip with manual hex parsing.
            var hash_bytes = new uint8[64];
            size_t digest_len = hash_bytes.length;
            checksum.get_digest(hash_bytes, ref digest_len);
            return hash_bytes;
        }

        /** Computes the SHA-512 digest of data as a lowercase hex string. */
        private string compute_hash_hex(uint8[] data) {
            var checksum = new Checksum(ChecksumType.SHA512);
            checksum.update(data, data.length);
            return checksum.get_string();
        }

        /**
         * Writes the binary SSR container.
         *
         * Layout: magic "spry-sr\0", name field, content-type field, 64-byte
         * SHA-512 hash, encoding count byte, then per-encoding headers
         * (type field, uint64 offset, uint64 size) followed by the payloads.
         * String fields are a length byte followed by UTF-8 bytes (max 255).
         */
        private void write_ssr_file(string path, string name, string content_type, uint8[] hash, List<EncodedData> encodings) throws Error {
            var output_file = File.new_for_path(path);
            var output_stream = new DataOutputStream(output_file.replace(null, false, FileCreateFlags.NONE));

            // Magic number: "spry-sr\0".
            foreach (var b in "spry-sr".data) {
                output_stream.put_byte(b);
            }
            output_stream.put_byte(0);

            write_string_field(output_stream, name);
            write_string_field(output_stream, content_type);

            // Raw SHA-512 digest (64 bytes).
            output_stream.write(hash);

            // Pre-compute the total header size so payload offsets are absolute.
            var header_size = 8;                                // magic
            header_size += 1 + name.data.length;                // name field
            header_size += 1 + content_type.data.length;        // content type field
            header_size += 64;                                  // hash
            header_size += 1;                                   // encoding count byte
            foreach (var encoding in encodings) {
                header_size += 1 + encoding.type.data.length;   // type string field
                header_size += 8;                               // offset
                header_size += 8;                               // size
            }

            output_stream.put_byte((uint8) encodings.length());

            // Encoding headers with absolute payload offsets.
            // NOTE(review): put_uint64 uses the stream's byte order (big-endian
            // by default) — confirm this matches the SSR reader.
            uint64 current_offset = header_size;
            foreach (var encoding in encodings) {
                write_string_field(output_stream, encoding.type);
                output_stream.put_uint64(current_offset);
                output_stream.put_uint64(encoding.data.length);
                current_offset += encoding.data.length;
            }

            // Payloads, in the same order as the headers.
            foreach (var encoding in encodings) {
                output_stream.write(encoding.data);
            }

            output_stream.close();
        }

        /**
         * Writes a Vala source file defining a ConstantStaticResource subclass
         * that embeds every encoding as a private const uint8[] array.
         */
        private void write_vala_file(string path, string class_name, string name, string content_type, string hash_hex, List<EncodedData> encodings, string? namespace_name) throws Error {
            var output_file = File.new_for_path(path);
            var output_stream = new DataOutputStream(output_file.replace(null, false, FileCreateFlags.NONE));

            // Shift the generated code one level right when wrapped in a namespace.
            string indent = namespace_name != null ? "    " : "";
            string indent2 = namespace_name != null ? "        " : "    ";
            string indent3 = namespace_name != null ? "            " : "        ";
            string indent4 = namespace_name != null ? "                " : "            ";

            output_stream.put_string("using Spry;\n");
            output_stream.put_string("using Invercargill;\n\n");

            if (namespace_name != null) {
                output_stream.put_string(@"namespace $(namespace_name) {\n\n");
            }

            output_stream.put_string(@"$(indent)// Generated by spry-mkssr\n");
            output_stream.put_string(@"$(indent)public class $(class_name) : ConstantStaticResource {\n\n");

            output_stream.put_string(@"$(indent2)public override string name { get { return \"$name\"; } }\n");
            output_stream.put_string(@"$(indent2)public override string file_hash { get { return \"$hash_hex\"; } }\n");
            output_stream.put_string(@"$(indent2)public override string content_type { get { return \"$content_type\"; } }\n\n");

            output_stream.put_string(@"$(indent2)public override string get_best_encoding(Set<string> supported) {\n");

            // Offer encodings smallest-first so the client gets the best ratio
            // among the encodings it supports.
            var sorted_encodings = new EncodedData[encodings.length()];
            int idx = 0;
            foreach (var enc in encodings) {
                sorted_encodings[idx++] = enc;
            }
            // Simple in-place sort by payload size (the list holds at most 4 entries).
            for (int i = 0; i < sorted_encodings.length - 1; i++) {
                for (int j = i + 1; j < sorted_encodings.length; j++) {
                    if (sorted_encodings[i].data.length > sorted_encodings[j].data.length) {
                        var tmp = sorted_encodings[i];
                        sorted_encodings[i] = sorted_encodings[j];
                        sorted_encodings[j] = tmp;
                    }
                }
            }

            foreach (var encoding in sorted_encodings) {
                if (encoding.type == "identity") continue; // identity is the fallback below
                output_stream.put_string(@"$(indent3)if (supported.has(\"$(encoding.type)\")) return \"$(encoding.type)\";\n");
            }
            output_stream.put_string(@"$(indent3)return \"identity\";\n");
            output_stream.put_string(@"$(indent2)}\n\n");

            output_stream.put_string(@"$(indent2)public override unowned uint8[] get_encoding(string encoding) {\n");
            output_stream.put_string(@"$(indent3)switch (encoding) {\n");
            foreach (var encoding in encodings) {
                var var_name = encoding_name_to_const(encoding.type);
                output_stream.put_string(@"$(indent4)case \"$(encoding.type)\": return $var_name;\n");
            }
            output_stream.put_string(@"$(indent4)default: return IDENTITY_DATA;\n");
            output_stream.put_string(@"$(indent3)}\n");
            output_stream.put_string(@"$(indent2)}\n\n");

            // One const array per encoding.
            foreach (var encoding in encodings) {
                var var_name = encoding_name_to_const(encoding.type);
                output_stream.put_string(@"$(indent2)private const uint8[] $var_name = {\n");
                write_byte_array(output_stream, encoding.data, indent3);
                output_stream.put_string(@"\n$(indent2)};\n\n");
            }

            output_stream.put_string(@"$(indent)}\n");

            if (namespace_name != null) {
                output_stream.put_string("}\n");
            }

            output_stream.close();
        }

        /**
         * Maps an encoding token to its generated const array name,
         * e.g. "identity" -> "IDENTITY_DATA", "gzip" -> "GZIP_DATA", "br" -> "BR_DATA".
         */
        private string encoding_name_to_const(string encoding_type) {
            var upper = encoding_type.replace("-", "_").up();
            return @"$(upper)_DATA";
        }

        /**
         * Emits data as a comma-separated list of 0xNN literals,
         * 16 bytes per line, each line prefixed with indent.
         */
        private void write_byte_array(DataOutputStream stream, uint8[] data, string indent) throws Error {
            for (int i = 0; i < data.length; i++) {
                if (i > 0) {
                    if (i % 16 == 0) {
                        stream.put_string(",\n" + indent);
                    } else {
                        stream.put_string(", ");
                    }
                } else {
                    stream.put_string(indent);
                }
                stream.put_string("0x%02x".printf(data[i]));
            }
        }

        /**
         * Writes a length-prefixed string field: one length byte followed by
         * the UTF-8 bytes (no terminator).
         *
         * @throws IOError.INVALID_DATA when the string exceeds 255 bytes
         */
        private void write_string_field(DataOutputStream stream, string value) throws Error {
            var bytes = value.data;
            if (bytes.length > 255) {
                throw new IOError.INVALID_DATA(@"String field too long: $(value.length) bytes (max 255)");
            }
            stream.put_byte((uint8) bytes.length);
            stream.write(bytes);
        }

        /**
         * Converts a filename to PascalCase: strips the extension, then
         * capitalizes each word split on '-', '_', ' ' or '.'.
         */
        private static string make_pascal_case(string input) {
            var name = input;
            var dot_pos = name.last_index_of(".");
            if (dot_pos > 0) {
                name = name.substring(0, dot_pos);
            }

            var result = new StringBuilder();
            var capitalize_next = true;
            foreach (var c in name.to_utf8()) {
                if (c == '-' || c == '_' || c == ' ' || c == '.') {
                    capitalize_next = true;
                } else if (capitalize_next) {
                    result.append(c.toupper().to_string());
                    capitalize_next = false;
                } else {
                    result.append(c.to_string());
                }
            }
            return result.str;
        }

        /** An encoding token (HTTP Content-Encoding value) paired with its payload. */
        private class EncodedData {
            public string type;
            public uint8[] data;

            public EncodedData(string type, owned uint8[] data) {
                this.type = type;
                this.data = (owned) data;
            }
        }
    }
}