Fork me on GitHub

Schema Feature Extensions


New in Marten 2.4.0 is the ability to extend Marten with custom features backed by your own database schema objects, which plug directly into Marten's schema management facilities. The key abstraction is the IFeatureSchema interface shown below:


/// <summary>
/// Defines the database objects for a named feature within your
/// Marten application
/// </summary>
public interface IFeatureSchema
{
    /// <summary>
    /// Any document or feature types that this feature depends on. Used
    /// to intelligently order the creation and scripting of database
    /// schema objects
    /// </summary>
    /// <returns>The types this feature depends on; may be empty</returns>
    IEnumerable<Type> DependentTypes();
    
    /// <summary>
    /// Should this feature be active based on the current options?
    /// </summary>
    /// <param name="options">The current store configuration</param>
    /// <returns>True if this feature's schema objects should be built</returns>
    bool IsActive(StoreOptions options);
    
    /// <summary>
    /// All the schema objects in this feature
    /// </summary>
    ISchemaObject[] Objects { get; }
    
    /// <summary>
    /// Identifier by type for this feature. Used along with the DependentTypes()
    /// collection to control the proper ordering of object creation or scripting
    /// </summary>
    Type StorageType { get; }

    /// <summary>
    /// The identifier for this feature; used as the file name when the
    /// SQL is exported
    /// </summary>
    string Identifier { get; }

    /// <summary>
    /// Write any permission SQL when this feature is exported to a SQL
    /// file
    /// </summary>
    /// <param name="rules">The DDL rules in effect for the export</param>
    /// <param name="writer">The writer receiving the exported SQL</param>
    void WritePermissions(DdlRules rules, StringWriter writer);
}

Not to worry though, Marten comes with a base class that makes it a bit simpler to build out new features. Here's a very simple example that defines a custom table with one column:


public class FakeStorage : FeatureSchemaBase
{
    // Registers this feature under the name "fake" and passes the
    // current store options up to the base class
    public FakeStorage(StoreOptions options) : base("fake", options)
    {
    }

    // Declares the feature's schema objects: a single custom table with
    // one varchar column, created in the configured database schema
    protected override IEnumerable<ISchemaObject> schemaObjects()
    {
        var fakeTable = new Table(new DbObjectName(Options.DatabaseSchemaName, "mt_fake_table"));
        fakeTable.AddColumn("name", "varchar");
        yield return fakeTable;
    }
}

Now, to actually apply this feature to your Marten applications, use this syntax:


// Apply the custom feature by registering it with the store's
// storage features at configuration time
var store = DocumentStore.For(_ =>
{
    // Creates a new instance of FakeStorage and
    // passes along the current StoreOptions
    _.Storage.Add<FakeStorage>();
    
    // or construct the feature yourself and register the instance
    
    _.Storage.Add(new FakeStorage(_));
});

Do note that when you use the Add&lt;T&gt;() syntax, Marten will pass the current StoreOptions to the constructor if the type has a constructor accepting a StoreOptions argument. Otherwise, it uses the no-argument constructor.

While you can directly implement the ISchemaObject interface for something Marten doesn't already support, it's probably far easier to just configure one of the existing implementations shown in the following sections.

  • Table
  • Function
  • Sequence

Table

PostgreSQL tables can be modeled with the Table class, as shown in this example from the event store inside of Marten:


// Models the mt_events table that stores the serialized events for the
// event store, keyed by a global sequence number
public class EventsTable : Table
{
    public EventsTable(EventGraph events) : base(new DbObjectName(events.DatabaseSchemaName, "mt_events"))
    {
        // Column type for stream identities, derived from the event
        // store configuration
        var stringIdType = events.GetStreamIdType();

        AddPrimaryKey(new TableColumn("seq_id", "bigint"));
        AddColumn("id", "uuid", "NOT NULL");
        // Cascade delete: removing a stream removes its events
        AddColumn("stream_id", stringIdType, $"REFERENCES {events.DatabaseSchemaName}.mt_streams ON DELETE CASCADE");
        AddColumn("version", "integer", "NOT NULL");
        AddColumn("data", "jsonb", "NOT NULL");
        AddColumn("type", "varchar(100)", "NOT NULL");
        AddColumn("timestamp", "timestamptz", "default (now()) NOT NULL");
        AddColumn<TenantIdColumn>();
        AddColumn(new DotNetTypeColumn {Directive = "NULL"});

        // A stream can only have one event at a given version, and event
        // ids must be globally unique
        Constraints.Add("CONSTRAINT pk_mt_events_stream_and_version UNIQUE(stream_id, version)");
        Constraints.Add("CONSTRAINT pk_mt_events_id_unique UNIQUE(id)");
    }
}

Function

PostgreSQL functions can be managed by creating a subclass of the Function base class, as shown below with the big "append event" function from the event store:


    /// <summary>
    /// Models the plpgsql function that appends a batch of events to a
    /// stream, creating the stream row on first use and enforcing
    /// tenant-id consistency on existing streams
    /// </summary>
    public class AppendEventFunction : Function
    {
        private readonly EventGraph _events;

        public AppendEventFunction(EventGraph events) : base(new DbObjectName(events.DatabaseSchemaName, "mt_append_event"))
        {
            _events = events;
        }

        /// <summary>
        /// Writes the CREATE OR REPLACE FUNCTION statement for the
        /// append-event function to the supplied writer
        /// </summary>
        /// <param name="rules">The DDL rules in effect (unused here)</param>
        /// <param name="writer">The writer receiving the generated SQL</param>
        public override void Write(DdlRules rules, StringWriter writer)
        {
            var streamIdType = _events.GetStreamIdType();
            var databaseSchema = _events.DatabaseSchemaName;

            writer.WriteLine($@"
CREATE OR REPLACE FUNCTION {Identifier}(stream {streamIdType}, stream_type varchar, tenantid varchar, event_ids uuid[], event_types varchar[], dotnet_types varchar[], bodies jsonb[]) RETURNS int[] AS $$
DECLARE
	event_version int;
	event_type varchar;
	event_id uuid;
	body jsonb;
	index int;
	seq int;
    actual_tenant varchar;
	return_value int[];
BEGIN
	select version into event_version from {databaseSchema}.mt_streams where id = stream;
	if event_version IS NULL then
		event_version = 0;
		insert into {databaseSchema}.mt_streams (id, type, version, timestamp, tenant_id) values (stream, stream_type, 0, now(), tenantid);
    else
        if tenantid IS NOT NULL then
            select tenant_id into actual_tenant from {databaseSchema}.mt_streams where id = stream;
            if actual_tenant != tenantid then
                RAISE EXCEPTION 'The tenantid does not match the existing stream';
            end if;
        end if;
	end if;


	index := 1;
	return_value := ARRAY[event_version + array_length(event_ids, 1)];

	foreach event_id in ARRAY event_ids
	loop
	    seq := nextval('{databaseSchema}.mt_events_sequence');
		return_value := array_append(return_value, seq);

	    event_version := event_version + 1;
		event_type = event_types[index];
		body = bodies[index];

		insert into {databaseSchema}.mt_events 
			(seq_id, id, stream_id, version, data, type, tenant_id, {DocumentMapping.DotNetTypeColumn}) 
		values 
			(seq, event_id, stream, event_version, body, event_type, tenantid, dotnet_types[index]);

		
		index := index + 1;
	end loop;

	update {databaseSchema}.mt_streams set version = event_version, timestamp = now() where id = stream;

	return return_value;
END
$$ LANGUAGE plpgsql;
");
        }

        /// <summary>
        /// Produces the DROP FUNCTION statement. Postgres identifies a
        /// function overload by its full argument type list, so this list
        /// must match the CREATE signature above exactly.
        /// </summary>
        protected override string toDropSql()
        {
            var streamIdType = _events.GetStreamIdType();
            // Fixed: the type list was missing the varchar[] for the
            // dotnet_types parameter, so DROP FUNCTION IF EXISTS never
            // matched the function and silently dropped nothing
            return $"drop function if exists {Identifier} ({streamIdType}, varchar, varchar, uuid[], varchar[], varchar[], jsonb[])";
        }
    }

Sequence

PostgreSQL sequences can be managed with this usage:


// Declares the sequence that generates event sequence numbers, tying its
// ownership to the events table's seq_id column (presumably mapping to
// Postgres OWNED BY semantics — verify against the Sequence implementation)
var sequence = new Sequence(new DbObjectName(DatabaseSchemaName, "mt_events_sequence"))
{
    Owner = eventsTable.Identifier,
    OwnerColumn = "seq_id"
};