using Invercargill.DataStructures;
using Invercargill.Expressions;
using InvercargillSql.Migrations;
using InvercargillSql.Orm;
using InvercargillSql.Orm.Projections;

namespace InvercargillSql.Dialects {

    /**
     * SQLite implementation of the SqlDialect interface.
     *
     * SqliteDialect provides SQLite-specific type translations,
     * SQL generation methods, and schema introspection for SQLite databases.
     */
    public class SqliteDialect : Object, SqlDialect {

        /**
         * Translates a ColumnType to SQLite's type system.
         *
         * SQLite uses dynamic typing but these are the recommended
         * storage classes for each column type.
         *
         * @param type The column type to translate
         * @return The SQLite type string
         */
        public string translate_type(ColumnType type) {
            switch (type) {
                case ColumnType.INT_32:
                case ColumnType.INT_64:
                case ColumnType.BOOLEAN:
                    return "INTEGER";
                case ColumnType.TEXT:
                case ColumnType.UUID:
                    return "TEXT";
                case ColumnType.DECIMAL:
                    return "REAL";
                case ColumnType.DATETIME:
                    // Unix epoch stored as integer
                    return "INTEGER";
                case ColumnType.BINARY:
                    return "BLOB";
                default:
                    return "TEXT";
            }
        }

        /**
         * Translates an expression tree to SQL.
         *
         * Currently returns empty string - will be implemented
         * with ExpressionToSqlVisitor in a future phase.
         *
         * @param expr The expression to translate
         * @param mapper The entity mapper for column resolution
         * @return The SQL string representation
         */
        public string translate_expression(Expression expr, EntityMapper mapper) {
            // For now, return empty - will be implemented with ExpressionToSqlVisitor
            return "";
        }

        /**
         * Builds a SELECT statement for all rows in a table.
         *
         * @param table_name The name of the table
         * @return The SQL SELECT statement
         */
        public string build_select_all(string table_name) {
            return "SELECT * FROM %s".printf(table_name);
        }

        /**
         * Builds a SELECT statement for a single row by ID.
         *
         * @param table_name The name of the table
         * @param id_column The name of the ID column
         * @return The SQL SELECT statement with named parameter placeholder
         */
        public string build_select_by_id(string table_name, string id_column) {
            return "SELECT * FROM %s WHERE %s = :%s".printf(table_name, id_column, id_column);
        }

        /**
         * Builds an INSERT statement with named parameter placeholders.
         *
         * @param table_name The name of the table
         * @param columns The column names to insert into
         * @return The SQL INSERT statement with named parameter placeholders
         */
        public string build_insert_sql(string table_name, Vector columns) {
            var cols = new string[0];
            var placeholders = new string[0];
            foreach (var col in columns) {
                cols += col;
                placeholders += ":" + col;
            }
            return "INSERT INTO %s (%s) VALUES (%s)".printf(
                table_name,
                string.joinv(", ", cols),
                string.joinv(", ", placeholders)
            );
        }

        /**
         * Builds an UPDATE statement with named parameter placeholders.
         *
         * The ID column is excluded from the SET clause and used
         * only in the WHERE clause.
         *
         * @param table_name The name of the table
         * @param columns The column names to update
         * @param id_column The name of the ID column for the WHERE clause
         * @return The SQL UPDATE statement with named parameter placeholders
         */
        public string build_update_sql(string table_name, Vector columns, string id_column) {
            var set_clauses = new string[0];
            foreach (var col in columns) {
                if (col != id_column) {
                    set_clauses += "%s = :%s".printf(col, col);
                }
            }
            return "UPDATE %s SET %s WHERE %s = :%s".printf(
                table_name,
                string.joinv(", ", set_clauses),
                id_column,
                id_column
            );
        }

        /**
         * Builds a DELETE statement with named parameter placeholder.
         *
         * @param table_name The name of the table
         * @param id_column The name of the ID column for the WHERE clause
         * @return The SQL DELETE statement with named parameter placeholder
         */
        public string build_delete_sql(string table_name, string id_column) {
            return "DELETE FROM %s WHERE %s = :%s".printf(table_name, id_column, id_column);
        }

        /**
         * Builds a CREATE TABLE statement.
         *
         * @param table_name The name of the table
         * @param columns The column definitions
         * @return The SQL CREATE TABLE statement
         */
        public string build_create_table(string table_name, Vector columns) {
            var column_defs = new string[0];
            foreach (var col in columns) {
                column_defs += build_column_def(col);
            }
            return "CREATE TABLE %s (%s)".printf(
                table_name,
                string.joinv(", ", column_defs)
            );
        }

        /**
         * Builds a CREATE INDEX statement.
         *
         * @param table_name The name of the table
         * @param index The index definition
         * @return The SQL CREATE INDEX statement
         */
        public string build_create_index(string table_name, IndexDefinition index) {
            var unique = index.is_unique ? "UNIQUE " : "";
            var cols = new string[0];
            foreach (var col in index.columns) {
                cols += col;
            }
            return "CREATE %sINDEX %s ON %s (%s)".printf(
                unique,
                index.name,
                table_name,
                string.joinv(", ", cols)
            );
        }

        /**
         * Builds a single column definition for CREATE TABLE.
         *
         * @param col The column definition
         * @return The SQL column definition string
         */
        private string build_column_def(ColumnDefinition col) {
            var parts = new string[0];
            parts += col.name;
            parts += translate_type(col.column_type);
            if (col.is_primary_key) {
                parts += "PRIMARY KEY";
            }
            // SQLite only permits AUTOINCREMENT on an INTEGER PRIMARY KEY column.
            if (col.auto_increment && col.is_primary_key) {
                parts += "AUTOINCREMENT";
            }
            // PRIMARY KEY already implies NOT NULL and uniqueness in SQLite.
            if (col.is_required && !col.is_primary_key) {
                parts += "NOT NULL";
            }
            if (col.is_unique && !col.is_primary_key) {
                parts += "UNIQUE";
            }
            if (col.default_now) {
                // strftime('%s', 'now') yields the current Unix epoch.
                // This literal is never passed through printf, so the '%' must
                // NOT be doubled (the previous "%%s" produced invalid SQL).
                parts += "DEFAULT (strftime('%s', 'now'))";
            }
            return string.joinv(" ", parts);
        }

        // Migration DDL method implementations

        /**
         * Generates SQL for a CREATE TABLE operation with constraints.
         *
         * @param op The create table operation
         * @return The SQL CREATE TABLE statement
         */
        public string create_table_sql(CreateTableOperation op) {
            var sql = new StringBuilder();
            sql.append("CREATE TABLE ");
            sql.append(op.table_name);
            sql.append(" (\n");
            var parts = new Vector();
            foreach (var col in op.columns) {
                parts.add(build_column_sql(col));
            }
            foreach (var constraint in op.constraints) {
                parts.add(build_constraint_sql(constraint));
            }
            bool first = true;
            foreach (var part in parts) {
                if (!first) {
                    sql.append(",\n");
                }
                first = false;
                sql.append("    ");
                sql.append(part);
            }
            sql.append("\n)");
            return sql.str;
        }

        /**
         * Builds a column definition SQL string for migration operations.
         *
         * @param col The column definition
         * @return The SQL column definition string
         */
        private string build_column_sql(ColumnDefinition col) {
            var parts = new Vector();
            parts.add(col.name);
            parts.add(translate_type(col.column_type));
            if (col.is_primary_key) parts.add("PRIMARY KEY");
            // SQLite rejects AUTOINCREMENT on anything but an INTEGER PRIMARY KEY,
            // so guard on is_primary_key here (matching build_column_def).
            if (col.auto_increment && col.is_primary_key) parts.add("AUTOINCREMENT");
            if (col.is_required) parts.add("NOT NULL");
            if (col.is_unique) parts.add("UNIQUE");
            if (col.default_value != null) {
                parts.add("DEFAULT " + element_to_sql(col.default_value));
            }
            // NOTE(review): if both default_value and default_now are set, two
            // DEFAULT clauses are emitted (invalid SQL) — callers appear to set
            // at most one; confirm upstream.
            if (col.default_now) {
                parts.add("DEFAULT (strftime('%s', 'now'))");
            }
            return string.joinv(" ", parts.to_array());
        }

        /**
         * Converts an Element to its SQL representation for default values.
         *
         * Tries string, bool, and the numeric types in turn; anything that
         * cannot be represented falls back to NULL.
         *
         * @param element The element to convert
         * @return The SQL string representation
         */
        private string element_to_sql(Invercargill.Element element) {
            if (element.is_null()) {
                return "NULL";
            }
            // Try string first (needs quoting; embedded quotes are doubled)
            if (element.assignable_to_type(typeof(string))) {
                string? s;
                if (element.try_get_as(out s) && s != null) {
                    return "'" + s.replace("'", "''") + "'";
                }
            }
            // Try bool (convert to 0/1)
            if (element.assignable_to_type(typeof(bool))) {
                bool? b;
                if (element.try_get_as(out b) && b != null) {
                    return b ? "1" : "0";
                }
            }
            // Try int64
            if (element.assignable_to_type(typeof(int64))) {
                int64? i;
                if (element.try_get_as(out i) && i != null) {
                    return i.to_string();
                }
            }
            // Try int
            if (element.assignable_to_type(typeof(int))) {
                int? i;
                if (element.try_get_as(out i) && i != null) {
                    return i.to_string();
                }
            }
            // Try long
            if (element.assignable_to_type(typeof(long))) {
                long? l;
                if (element.try_get_as(out l) && l != null) {
                    return l.to_string();
                }
            }
            // Try double
            if (element.assignable_to_type(typeof(double))) {
                double? d;
                if (element.try_get_as(out d) && d != null) {
                    return d.to_string();
                }
            }
            // Fallback to NULL
            return "NULL";
        }

        /**
         * Builds a table constraint SQL string.
         *
         * @param constraint The table constraint
         * @return The SQL constraint string
         */
        private string build_constraint_sql(TableConstraint constraint) {
            var sql = new StringBuilder();
            if (constraint.name != null) {
                sql.append("CONSTRAINT ");
                sql.append(constraint.name);
                sql.append(" ");
            }
            sql.append(constraint.constraint_type);
            sql.append(" (");
            sql.append(string.joinv(", ", constraint.columns.to_array()));
            sql.append(")");
            if (constraint.constraint_type == "FOREIGN KEY" && constraint.reference_table != null) {
                sql.append(" REFERENCES ");
                sql.append(constraint.reference_table);
                sql.append(" (");
                sql.append(string.joinv(", ", constraint.reference_columns.to_array()));
                sql.append(")");
                // Add ON DELETE action if not NO_ACTION (default)
                if (constraint.on_delete_action != ReferentialAction.NO_ACTION) {
                    sql.append(" ON DELETE ");
                    sql.append(constraint.on_delete_action.to_sql());
                }
                // Add ON UPDATE action if not NO_ACTION (default)
                if (constraint.on_update_action != ReferentialAction.NO_ACTION) {
                    sql.append(" ON UPDATE ");
                    sql.append(constraint.on_update_action.to_sql());
                }
            }
            return sql.str;
        }

        /**
         * Generates SQL for a DROP TABLE operation.
         *
         * @param op The drop table operation
         * @return The SQL DROP TABLE statement
         */
        public string drop_table_sql(DropTableOperation op) {
            return "DROP TABLE IF EXISTS " + op.table_name;
        }

        /**
         * Generates SQL for an ADD COLUMN operation.
         *
         * When the operation has a foreign_key_constraint, the REFERENCES clause
         * is included inline in the column definition.
         *
         * @param op The add column operation
         * @return The SQL ALTER TABLE ADD COLUMN statement
         */
        public string add_column_sql(AddColumnOperation op) {
            var sb = new StringBuilder();
            sb.append("ALTER TABLE ");
            sb.append(op.table_name);
            sb.append(" ADD COLUMN ");
            sb.append(build_column_sql(op.column));
            // Add inline REFERENCES if FK constraint is set
            if (op.foreign_key_constraint != null) {
                var fk = op.foreign_key_constraint;
                sb.append(" REFERENCES ");
                sb.append(fk.reference_table);
                sb.append(" (");
                sb.append(string.joinv(", ", fk.reference_columns.to_array()));
                sb.append(")");
                if (fk.on_delete_action != ReferentialAction.NO_ACTION) {
                    sb.append(" ON DELETE ");
                    sb.append(fk.on_delete_action.to_sql());
                }
                if (fk.on_update_action != ReferentialAction.NO_ACTION) {
                    sb.append(" ON UPDATE ");
                    sb.append(fk.on_update_action.to_sql());
                }
            }
            return sb.str;
        }

        /**
         * Generates SQL for a DROP COLUMN operation.
         *
         * SQLite 3.35.0+ supports DROP COLUMN natively.
         *
         * @param op The drop column operation
         * @return The SQL ALTER TABLE DROP COLUMN statement
         */
        public string drop_column_sql(DropColumnOperation op) {
            return "ALTER TABLE " + op.table_name + " DROP COLUMN " + op.column_name;
        }

        /**
         * Generates SQL for a RENAME COLUMN operation.
         *
         * @param op The rename column operation
         * @return The SQL ALTER TABLE RENAME COLUMN statement
         */
        public string rename_column_sql(RenameColumnOperation op) {
            return "ALTER TABLE " + op.table_name + " RENAME COLUMN " + op.old_name + " TO " + op.new_name;
        }

        /**
         * Generates SQL for a CREATE INDEX operation.
         *
         * @param op The create index operation
         * @return The SQL CREATE INDEX statement
         */
        public string create_index_sql(CreateIndexOperation op) {
            var sql = new StringBuilder();
            sql.append(op.is_unique ? "CREATE UNIQUE INDEX " : "CREATE INDEX ");
            sql.append(op.index_name);
            sql.append(" ON ");
            sql.append(op.table_name);
            sql.append(" (");
            sql.append(string.joinv(", ", op.columns.to_array()));
            sql.append(")");
            return sql.str;
        }

        /**
         * Generates SQL for a DROP INDEX operation.
         *
         * @param op The drop index operation
         * @return The SQL DROP INDEX statement
         */
        public string drop_index_sql(DropIndexOperation op) {
            return "DROP INDEX IF EXISTS " + op.index_name;
        }

        /**
         * Generates SQL for a DROP FOREIGN KEY operation.
         *
         * SQLite does not support ALTER TABLE DROP CONSTRAINT directly.
         * Instead, this method generates a special marker that is intercepted
         * by the SQLite internals (SqliteConnection.execute()) which then
         * performs transparent table recreation.
         *
         * Marker format: INVERCARGILL_SQL_SQLITE_DROP_CONSTRAINT(table='name',constraint='name')
         * or: INVERCARGILL_SQL_SQLITE_DROP_CONSTRAINT(table='name',column='col')
         *
         * @param op The drop foreign key operation
         * @return A special marker that triggers transparent table recreation
         */
        public string drop_foreign_key_sql(DropForeignKeyOperation op) {
            var sql = new StringBuilder();
            sql.append("INVERCARGILL_SQL_SQLITE_DROP_CONSTRAINT(");
            sql.append("table='");
            sql.append(escape_marker_value(op.table_name));
            sql.append("'");
            if (op.constraint_name != null) {
                sql.append(",constraint='");
                sql.append(escape_marker_value(op.constraint_name));
                sql.append("'");
            } else if (op.column_name != null) {
                sql.append(",column='");
                sql.append(escape_marker_value(op.column_name));
                sql.append("'");
            }
            sql.append(")");
            return sql.str;
        }

        /**
         * Escapes a value for use in a DROP CONSTRAINT marker.
         * Single quotes are escaped by doubling them.
         */
        private string escape_marker_value(string value) {
            return value.replace("'", "''");
        }

        /**
         * Generates SQL to drop a foreign key constraint using table recreation.
         *
         * This method requires schema introspection data (columns and foreign keys)
         * which must be obtained separately using PRAGMA commands on an active connection.
         *
         * The table recreation strategy:
         * 1. Create a new table (_table_new) with all columns but without the dropped FK
         * 2. Copy all data from the old table
         * 3. Drop the old table
         * 4. Rename the new table to the original name
         * 5. Recreate any indexes
         *
         * @param table_name The table containing the FK constraint
         * @param columns The column definitions for the recreated table
         * @param remaining_constraints All constraints EXCEPT the one being dropped
         * @param index_definitions Indexes to recreate after table recreation
         * @return SQL statements for table recreation (semicolon-separated)
         */
        public string generate_drop_fk_table_recreation_sql(
            string table_name,
            Vector columns,
            Vector remaining_constraints,
            Vector index_definitions
        ) {
            var sql = new StringBuilder();
            string new_table_name = @"_$(table_name)_new";

            // Step 1: Create the new table without the dropped FK
            sql.append("CREATE TABLE ");
            sql.append(quote_identifier(new_table_name));
            sql.append(" (\n");
            var parts = new Vector();
            foreach (var col in columns) {
                parts.add(build_column_sql(col));
            }
            foreach (var constraint in remaining_constraints) {
                parts.add(build_constraint_sql(constraint));
            }
            bool first = true;
            foreach (var part in parts) {
                if (!first) {
                    sql.append(",\n");
                }
                first = false;
                sql.append("    ");
                sql.append(part);
            }
            sql.append("\n);\n\n");

            // Step 2: Copy data from old table to new table
            var column_names = new Vector();
            foreach (var col in columns) {
                column_names.add(col.name);
            }
            sql.append("INSERT INTO ");
            sql.append(quote_identifier(new_table_name));
            sql.append(" (");
            sql.append(string.joinv(", ", column_names.to_array()));
            sql.append(")\nSELECT ");
            sql.append(string.joinv(", ", column_names.to_array()));
            sql.append(" FROM ");
            sql.append(quote_identifier(table_name));
            sql.append(";\n\n");

            // Step 3: Drop the old table
            sql.append("DROP TABLE ");
            sql.append(quote_identifier(table_name));
            sql.append(";\n\n");

            // Step 4: Rename the new table
            sql.append("ALTER TABLE ");
            sql.append(quote_identifier(new_table_name));
            sql.append(" RENAME TO ");
            sql.append(quote_identifier(table_name));
            sql.append(";\n");

            // Step 5: Recreate indexes
            foreach (var index_op in index_definitions) {
                sql.append("\n");
                sql.append(create_index_sql(index_op));
                sql.append(";");
            }

            return sql.str;
        }

        /**
         * Quotes an identifier for use in SQL.
         * Uses double quotes for SQLite identifier quoting.
         */
        private string quote_identifier(string identifier) {
            return "\"" + identifier.replace("\"", "\"\"") + "\"";
        }

        // Schema introspection implementation

        /**
         * Introspects table schema from a SQLite database.
         *
         * This method uses PRAGMA table_info() to discover column metadata
         * and sqlite_master to identify primary keys.
         *
         * @param connection The database connection
         * @param table_name The table to introspect
         * @return A TableSchema containing column metadata
         * @throws SqlError if introspection fails
         */
        public TableSchema introspect_schema(Connection connection, string table_name) throws SqlError {
            var schema = new TableSchema();
            schema.table_name = table_name;

            // Query column information using PRAGMA table_info.
            // The identifier is quoted so table names containing special
            // characters (or untrusted input) cannot alter the statement.
            var pragma_sql = "PRAGMA table_info(%s)".printf(quote_identifier(table_name));
            var command = connection.create_command(pragma_sql);
            var results = command.execute_query();

            foreach (var row in results) {
                var column = new ColumnSchema();
                // PRAGMA table_info columns: cid, name, type, notnull, dflt_value, pk
                var name_elem = row.get("name");
                var type_elem = row.get("type");
                var notnull_elem = row.get("notnull");
                var pk_elem = row.get("pk");

                if (name_elem != null) {
                    string? name_val = null;
                    name_elem.try_get_as(out name_val);
                    column.name = name_val ?? "";
                }
                if (type_elem != null) {
                    string? type_str = null;
                    type_elem.try_get_as(out type_str);
                    column.column_type = parse_sqlite_type(type_str ?? "TEXT");
                }
                if (notnull_elem != null) {
                    int64? notnull = null;
                    notnull_elem.try_get_as(out notnull);
                    column.is_required = (notnull == 1);
                }
                if (pk_elem != null) {
                    int64? pk = null;
                    pk_elem.try_get_as(out pk);
                    column.is_primary_key = (pk != null && pk > 0);
                    if (column.is_primary_key) {
                        schema.primary_key_column = column.name;
                        // Check for auto-increment by querying sqlite_sequence
                        // In SQLite, INTEGER PRIMARY KEY is always auto-increment potential
                        // but only actually auto-increments if the column is declared as such
                        // For simplicity, we'll check if it's an INTEGER PRIMARY KEY
                        if (column.column_type == ColumnType.INT_64 || column.column_type == ColumnType.INT_32) {
                            column.auto_increment = true;
                        }
                    }
                }
                schema.columns.add(column);
            }

            return schema;
        }

        /**
         * Parses a SQLite type string to a ColumnType.
         *
         * SQLite uses type affinity, so we match based on keywords in the type name.
         *
         * @param type_str The SQLite type string
         * @return The corresponding ColumnType
         */
        private ColumnType parse_sqlite_type(string type_str) {
            var upper = type_str.up();
            if ("INT" in upper) {
                return ColumnType.INT_64;
            }
            if ("CHAR" in upper || "CLOB" in upper || "TEXT" in upper) {
                return ColumnType.TEXT;
            }
            if ("BLOB" in upper) {
                return ColumnType.BINARY;
            }
            if ("REAL" in upper || "FLOA" in upper || "DOUB" in upper) {
                return ColumnType.DECIMAL;
            }
            if ("DATE" in upper || "TIME" in upper) {
                return ColumnType.DATETIME;
            }
            if ("BOOL" in upper) {
                return ColumnType.BOOLEAN;
            }
            if ("UUID" in upper || "GUID" in upper) {
                return ColumnType.UUID;
            }
            // Default to TEXT
            return ColumnType.TEXT;
        }

        // Projection query method implementations

        /**
         * Builds a SELECT statement with JOINs for a projection query.
         *
         * This method constructs a complete SELECT query from a ProjectionDefinition,
         * including all JOINs, selections, WHERE/HAVING clauses, GROUP BY, ORDER BY,
         * LIMIT, and OFFSET.
         *
         * @param definition The projection definition containing source, joins, and selections
         * @param translator The variable translator for alias resolution
         * @param where_clause Optional pre-built WHERE clause SQL
         * @param having_clause Optional pre-built HAVING clause SQL
         * @param order_by ORDER BY clauses
         * @param limit Optional LIMIT value
         * @param offset Optional OFFSET value
         * @return The complete SQL SELECT statement
         */
        public string build_projection_select(
            ProjectionDefinition definition,
            VariableTranslator translator,
            string? where_clause,
            string? having_clause,
            Vector order_by,
            int64? limit,
            int64? offset
        ) {
            var sql = new StringBuilder();

            // Build SELECT clause with all selections
            sql.append("SELECT ");
            build_select_clause(sql, definition, translator);

            // Build FROM clause with primary source
            sql.append("\nFROM ");
            build_from_clause(sql, definition, translator);

            // Build JOIN clauses for all joins
            foreach (var join in definition.joins) {
                sql.append("\n");
                build_join_clause(sql, join, translator);
            }

            // Build WHERE clause (if provided)
            if (where_clause != null && where_clause.length > 0) {
                sql.append("\nWHERE ");
                sql.append(where_clause);
            }

            // Build GROUP BY clause (if any group_by expressions)
            if (definition.group_by_expressions.length > 0) {
                sql.append("\nGROUP BY ");
                bool first = true;
                foreach (var group_expr in definition.group_by_expressions) {
                    if (!first) {
                        sql.append(", ");
                    }
                    first = false;
                    sql.append(translator.translate_expression(group_expr));
                }
            }

            // Build HAVING clause (if provided)
            if (having_clause != null && having_clause.length > 0) {
                sql.append("\nHAVING ");
                sql.append(having_clause);
            }

            // Build ORDER BY clause (if any)
            if (order_by.length > 0) {
                sql.append("\nORDER BY ");
                bool first = true;
                foreach (var order in order_by) {
                    if (!first) {
                        sql.append(", ");
                    }
                    first = false;
                    sql.append(order.expression);
                    if (order.descending) {
                        sql.append(" DESC");
                    }
                }
            }

            // Build LIMIT/OFFSET (if provided)
            if (limit != null) {
                sql.append("\nLIMIT ");
                sql.append(limit.to_string());
            }
            if (offset != null) {
                sql.append("\nOFFSET ");
                sql.append(offset.to_string());
            }

            return sql.str;
        }

        /**
         * Builds the SELECT clause with all selections.
         *
         * Each selection is formatted as: expression AS friendly_name
         *
         * @param sql The StringBuilder to append to
         * @param definition The projection definition
         * @param translator The variable translator for alias resolution
         */
        private void build_select_clause(
            StringBuilder sql,
            ProjectionDefinition definition,
            VariableTranslator translator
        ) {
            bool first = true;
            foreach (var selection in definition.selections) {
                if (!first) {
                    sql.append(", ");
                }
                first = false;
                // For scalar selections, translate the expression
                // For nested/collection selections, we'll handle them differently
                var scalar_selection = selection as ScalarSelection;
                if (scalar_selection != null) {
                    // Translate the expression using the variable translator
                    string translated_expr = translator.translate_expression(scalar_selection.expression);
                    sql.append(translated_expr);
                    sql.append(" AS ");
                    sql.append(selection.friendly_name);
                } else {
                    // For nested/collection projections, select a placeholder
                    // The actual nested data will be fetched separately
                    sql.append("NULL AS ");
                    sql.append(selection.friendly_name);
                }
            }
        }

        /**
         * Builds the FROM clause with the primary source.
         *
         * Format: table_name alias
         *
         * @param sql The StringBuilder to append to
         * @param definition The projection definition
         * @param translator The variable translator for alias resolution
         */
        private void build_from_clause(
            StringBuilder sql,
            ProjectionDefinition definition,
            VariableTranslator translator
        ) {
            if (definition.source == null) {
                return;
            }
            sql.append(definition.source.table_name);
            sql.append(" ");
            sql.append(translator.translate_variable(definition.source.variable_name));
        }

        /**
         * Builds a JOIN clause for a join definition.
         *
         * Format: JOIN table_name alias ON condition
         *
         * @param sql The StringBuilder to append to
         * @param join The join definition
         * @param translator The variable translator for alias resolution
         */
        private void build_join_clause(
            StringBuilder sql,
            JoinDefinition join,
            VariableTranslator translator
        ) {
            sql.append("JOIN ");
            sql.append(join.table_name);
            sql.append(" ");
            sql.append(translator.translate_variable(join.variable_name));
            sql.append(" ON ");
            sql.append(translator.translate_expression(join.join_condition));
        }

        /**
         * Builds a subquery wrapper for mixed aggregate/non-aggregate OR conditions.
         *
         * When a WHERE clause contains OR conditions that mix aggregate and non-aggregate
         * expressions, SQL requires special handling. This method wraps the inner query
         * and applies the combined WHERE clause to the outer query.
         *
         * @param inner_query The inner SELECT query
         * @param combined_where The WHERE clause to apply to the outer query
         * @return The wrapped query SQL
         */
        public string wrap_subquery_for_mixed_or(
            string inner_query,
            string combined_where
        ) {
            return @"SELECT * FROM ($inner_query) subq WHERE $combined_where";
        }

        /**
         * Generates a table alias with type information for debugging.
         *
         * The alias format includes an index and the entity type name to make
         * generated SQL more readable and easier to debug.
         *
         * Format: val_N_TypeName (e.g., val_1_User, val_2_Order)
         *
         * @param index The 1-based index for this alias
         * @param type_name The entity type name
         * @return The generated alias string
         */
        public string generate_table_alias(int index, string type_name) {
            return "val_%d_%s".printf(index, type_name);
        }
    }
}