Auto-migration generation

Tobe O 2017-11-18 15:10:58 -05:00
parent ff3e3b535b
commit 97989a6250
21 changed files with 386 additions and 167 deletions

View file

@@ -4,15 +4,14 @@
<content url="file://$MODULE_DIR$/angel_orm">
<excludeFolder url="file://$MODULE_DIR$/angel_orm/.pub" />
<excludeFolder url="file://$MODULE_DIR$/angel_orm/build" />
<excludeFolder url="file://$MODULE_DIR$/angel_orm/packages" />
</content>
<content url="file://$MODULE_DIR$/angel_orm_generator">
<excludeFolder url="file://$MODULE_DIR$/angel_orm_generator/.pub" />
<excludeFolder url="file://$MODULE_DIR$/angel_orm_generator/build" />
<excludeFolder url="file://$MODULE_DIR$/angel_orm_generator/packages" />
<excludeFolder url="file://$MODULE_DIR$/angel_orm_generator/test/models/packages" />
<excludeFolder url="file://$MODULE_DIR$/angel_orm_generator/test/packages" />
<excludeFolder url="file://$MODULE_DIR$/angel_orm_generator/tool/packages" />
</content>
<content url="file://$MODULE_DIR$/external">
<excludeFolder url="file://$MODULE_DIR$/external/.pub" />
<excludeFolder url="file://$MODULE_DIR$/external/build" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />

View file

@@ -56,57 +56,57 @@ enum IndexType {
class ColumnType {
/// The name of this data type.
final String name;
const ColumnType._(this.name);
const ColumnType(this.name);
static const ColumnType BOOLEAN = const ColumnType._('boolean');
static const ColumnType BOOLEAN = const ColumnType('boolean');
static const ColumnType SMALL_SERIAL = const ColumnType._('smallserial');
static const ColumnType SERIAL = const ColumnType._('serial');
static const ColumnType BIG_SERIAL = const ColumnType._('bigserial');
static const ColumnType SMALL_SERIAL = const ColumnType('smallserial');
static const ColumnType SERIAL = const ColumnType('serial');
static const ColumnType BIG_SERIAL = const ColumnType('bigserial');
// Numbers
static const ColumnType BIG_INT = const ColumnType._('bigint');
static const ColumnType INT = const ColumnType._('int');
static const ColumnType SMALL_INT = const ColumnType._('smallint');
static const ColumnType TINY_INT = const ColumnType._('tinyint');
static const ColumnType BIT = const ColumnType._('bit');
static const ColumnType DECIMAL = const ColumnType._('decimal');
static const ColumnType NUMERIC = const ColumnType._('numeric');
static const ColumnType MONEY = const ColumnType._('money');
static const ColumnType SMALL_MONEY = const ColumnType._('smallmoney');
static const ColumnType FLOAT = const ColumnType._('float');
static const ColumnType REAL = const ColumnType._('real');
static const ColumnType BIG_INT = const ColumnType('bigint');
static const ColumnType INT = const ColumnType('int');
static const ColumnType SMALL_INT = const ColumnType('smallint');
static const ColumnType TINY_INT = const ColumnType('tinyint');
static const ColumnType BIT = const ColumnType('bit');
static const ColumnType DECIMAL = const ColumnType('decimal');
static const ColumnType NUMERIC = const ColumnType('numeric');
static const ColumnType MONEY = const ColumnType('money');
static const ColumnType SMALL_MONEY = const ColumnType('smallmoney');
static const ColumnType FLOAT = const ColumnType('float');
static const ColumnType REAL = const ColumnType('real');
// Dates and times
static const ColumnType DATE_TIME = const ColumnType._('datetime');
static const ColumnType SMALL_DATE_TIME = const ColumnType._('smalldatetime');
static const ColumnType DATE = const ColumnType._('date');
static const ColumnType TIME = const ColumnType._('time');
static const ColumnType TIME_STAMP = const ColumnType._('timestamp');
static const ColumnType DATE_TIME = const ColumnType('datetime');
static const ColumnType SMALL_DATE_TIME = const ColumnType('smalldatetime');
static const ColumnType DATE = const ColumnType('date');
static const ColumnType TIME = const ColumnType('time');
static const ColumnType TIME_STAMP = const ColumnType('timestamp');
// Strings
static const ColumnType CHAR = const ColumnType._('char');
static const ColumnType VAR_CHAR = const ColumnType._('varchar');
static const ColumnType VAR_CHAR_MAX = const ColumnType._('varchar(max)');
static const ColumnType TEXT = const ColumnType._('text');
static const ColumnType CHAR = const ColumnType('char');
static const ColumnType VAR_CHAR = const ColumnType('varchar');
static const ColumnType VAR_CHAR_MAX = const ColumnType('varchar(max)');
static const ColumnType TEXT = const ColumnType('text');
// Unicode strings
static const ColumnType NCHAR = const ColumnType._('nchar');
static const ColumnType NVAR_CHAR = const ColumnType._('nvarchar');
static const ColumnType NVAR_CHAR_MAX = const ColumnType._('nvarchar(max)');
static const ColumnType NTEXT = const ColumnType._('ntext');
static const ColumnType NCHAR = const ColumnType('nchar');
static const ColumnType NVAR_CHAR = const ColumnType('nvarchar');
static const ColumnType NVAR_CHAR_MAX = const ColumnType('nvarchar(max)');
static const ColumnType NTEXT = const ColumnType('ntext');
// Binary
static const ColumnType BINARY = const ColumnType._('binary');
static const ColumnType VAR_BINARY = const ColumnType._('varbinary');
static const ColumnType VAR_BINARY_MAX = const ColumnType._('varbinary(max)');
static const ColumnType IMAGE = const ColumnType._('image');
static const ColumnType BINARY = const ColumnType('binary');
static const ColumnType VAR_BINARY = const ColumnType('varbinary');
static const ColumnType VAR_BINARY_MAX = const ColumnType('varbinary(max)');
static const ColumnType IMAGE = const ColumnType('image');
// Misc.
static const ColumnType SQL_VARIANT = const ColumnType._('sql_variant');
static const ColumnType SQL_VARIANT = const ColumnType('sql_variant');
static const ColumnType UNIQUE_IDENTIFIER =
const ColumnType._('uniqueidentifier');
static const ColumnType XML = const ColumnType._('xml');
static const ColumnType CURSOR = const ColumnType._('cursor');
static const ColumnType TABLE = const ColumnType._('table');
const ColumnType('uniqueidentifier');
static const ColumnType XML = const ColumnType('xml');
static const ColumnType CURSOR = const ColumnType('cursor');
static const ColumnType TABLE = const ColumnType('table');
}
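With this change the ColumnType constructor is public (the private ColumnType._ constructor is gone), so user code can declare column types that have no built-in constant. A minimal sketch, assuming only the package import shown; the 'uuid' type name and the annotated field are purely illustrative:

import 'package:angel_orm/angel_orm.dart';

// A custom SQL type, expressible now that the constructor is public.
const ColumnType uuid = const ColumnType('uuid');

// It can be passed wherever a built-in constant is expected, for example
// in a @Column annotation on a model field (hypothetical field shown):
//
//   @Column(type: uuid)
//   String token;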

View file

@@ -1,5 +1,5 @@
name: angel_orm
version: 1.0.0-alpha+7
version: 1.0.0-alpha+8
description: Runtime support for Angel's ORM.
author: Tobe O <thosakwe@gmail.com>
homepage: https://github.com/angel-dart/orm

View file

@@ -1,3 +1,4 @@
export 'src/builder/orm/migration.dart';
export 'src/builder/orm/postgres.dart';
export 'src/builder/orm/service.dart';
export 'src/builder/orm/service.dart';
export 'src/builder/orm/sql_migration.dart';

View file

@@ -142,6 +142,16 @@ Future<PostgresBuildContext> buildContext(
);
}
if (column != null) {
column = new Column(
nullable: column.nullable,
length: column.length,
index: column.index,
defaultValue: column.defaultValue,
type: inferColumnType(field.type),
);
}
if (column?.type == null)
throw 'Cannot infer SQL column type for field "${field.name}" with type "${field.type.name}".';
ctx.columnInfo[field.name] = column;
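This hunk rebuilds the Column annotation with a type inferred from the Dart field type whenever no explicit type was given, and throws if inference fails. The actual mapping inside inferColumnType is not part of this diff; the sketch below is an assumption about the kind of mapping such a helper typically performs, not the package's implementation:

import 'package:analyzer/dart/element/type.dart';
import 'package:angel_orm/angel_orm.dart';

// Hypothetical sketch: map common Dart core types to ColumnType constants.
ColumnType inferColumnType(DartType type) {
  switch (type.name) {
    case 'String':
      return ColumnType.VAR_CHAR;
    case 'int':
      return ColumnType.INT;
    case 'double':
      return ColumnType.DECIMAL;
    case 'num':
      return ColumnType.NUMERIC;
    case 'bool':
      return ColumnType.BOOLEAN;
    case 'DateTime':
      return ColumnType.TIME_STAMP;
    default:
      // Callers treat a null result as "cannot infer" and throw.
      return null;
  }
}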

View file

@@ -1,86 +1,65 @@
import 'dart:async';
import 'package:analyzer/dart/ast/ast.dart';
import 'package:analyzer/dart/element/element.dart';
import 'package:angel_orm/angel_orm.dart';
import 'package:build/build.dart';
import 'package:code_builder/dart/core.dart';
import 'package:code_builder/code_builder.dart';
import 'package:source_gen/source_gen.dart' hide LibraryBuilder;
import 'build_context.dart';
import 'package:source_gen/source_gen.dart';
import 'postgres_build_context.dart';
class SqlMigrationBuilder implements Builder {
class MigrationGenerator extends GeneratorForAnnotation<ORM> {
static final ParameterBuilder _schemaParam = parameter('schema', [
new TypeBuilder('Schema'),
]);
static final ReferenceBuilder _schema = reference('schema');
/// If `true` (default), then field names will automatically be (de)serialized as snake_case.
final bool autoSnakeCaseNames;
/// If `true` (default), then the schema will automatically add id, created_at and updated_at fields.
final bool autoIdAndDateFields;
/// If `true` (default: `false`), then the resulting schema will generate a `TEMPORARY` table.
final bool temporary;
const SqlMigrationBuilder(
{this.autoSnakeCaseNames: true,
this.autoIdAndDateFields: true,
this.temporary: false});
const MigrationGenerator(
{this.autoSnakeCaseNames: true, this.autoIdAndDateFields: true});
@override
Map<String, List<String>> get buildExtensions => {
'.dart': ['.up.g.sql', '.down.g.sql']
};
Future<String> generateForAnnotatedElement(
Element element, ConstantReader annotation, BuildStep buildStep) async {
if (buildStep.inputId.path.contains('.migration.g.dart')) {
return null;
}
@override
Future build(BuildStep buildStep) async {
if (element is! ClassElement)
throw 'Only classes can be annotated with @ORM().';
var resolver = await buildStep.resolver;
var up = new StringBuffer();
var down = new StringBuffer();
if (!await resolver.isLibrary(buildStep.inputId)) {
return;
}
var lib = await resolver.libraryFor(buildStep.inputId);
var elements = lib.definingCompilationUnit.unit.declarations;
if (!elements.any(
(el) => ormTypeChecker.firstAnnotationOf(el.element) != null)) return;
await generateSqlMigrations(lib, resolver, buildStep, up, down);
buildStep.writeAsString(
buildStep.inputId.changeExtension('.up.g.sql'), up.toString());
buildStep.writeAsString(
buildStep.inputId.changeExtension('.down.g.sql'), down.toString());
var ctx = await buildContext(element, reviveOrm(annotation), buildStep,
resolver, autoSnakeCaseNames != false, autoIdAndDateFields != false);
var lib = generateMigrationLibrary(ctx, element, resolver, buildStep);
if (lib == null) return null;
return prettyToSource(lib.buildAst());
}
Future generateSqlMigrations(LibraryElement libraryElement, Resolver resolver,
BuildStep buildStep, StringBuffer up, StringBuffer down) async {
List<String> done = [];
for (var element
in libraryElement.definingCompilationUnit.unit.declarations) {
if (element is ClassDeclaration && !done.contains(element.name)) {
var ann = ormTypeChecker.firstAnnotationOf(element.element);
if (ann != null) {
var ctx = await buildContext(
element.element,
reviveOrm(new ConstantReader(ann)),
buildStep,
resolver,
autoSnakeCaseNames != false,
autoIdAndDateFields != false);
buildUpMigration(ctx, up);
buildDownMigration(ctx, down);
done.add(element.name.name);
}
}
}
LibraryBuilder generateMigrationLibrary(PostgresBuildContext ctx,
ClassElement element, Resolver resolver, BuildStep buildStep) {
var lib = new LibraryBuilder()
..addDirective(
new ImportBuilder('package:angel_migration/angel_migration.dart'));
var clazz = new ClassBuilder('${ctx.modelClassName}Migration',
asExtends: new TypeBuilder('Migration'));
clazz..addMethod(buildUpMigration(ctx))..addMethod(buildDownMigration(ctx));
return lib..addMember(clazz);
}
void buildUpMigration(PostgresBuildContext ctx, StringBuffer buf) {
if (temporary == true)
buf.writeln('CREATE TEMPORARY TABLE "${ctx.tableName}" (');
else
buf.writeln('CREATE TABLE "${ctx.tableName}" (');
MethodBuilder buildUpMigration(PostgresBuildContext ctx) {
var meth = new MethodBuilder('up')..addPositional(_schemaParam);
var closure = new MethodBuilder.closure()
..addPositional(parameter('table'));
var table = reference('table');
List<String> dup = [];
int i = 0;
ctx.columnInfo.forEach((name, col) {
var key = ctx.resolveFieldName(name);
@@ -96,70 +75,121 @@ class SqlMigrationBuilder implements Builder {
}
dup.add(key);
if (i++ > 0) buf.writeln(',');
}
buf.write(' "$key" ${col.type.name}');
String methodName;
List<ExpressionBuilder> positional = [literal(key)];
Map<String, ExpressionBuilder> named = {};
if (col.index == IndexType.PRIMARY_KEY)
buf.write(' PRIMARY KEY');
else if (col.index == IndexType.UNIQUE) buf.write(' UNIQUE');
if (autoIdAndDateFields != false && name == 'id')
methodName = 'serial';
if (col.nullable != true) buf.write(' NOT NULLABLE');
if (methodName == null) {
switch (col.type) {
case ColumnType.VAR_CHAR:
methodName = 'varchar';
if (col.length != null) named['length'] = literal(col.length);
break;
case ColumnType.SERIAL:
methodName = 'serial';
break;
case ColumnType.INT:
methodName = 'integer';
break;
case ColumnType.FLOAT:
methodName = 'float';
break;
case ColumnType.NUMERIC:
methodName = 'numeric';
break;
case ColumnType.BOOLEAN:
methodName = 'boolean';
break;
case ColumnType.DATE:
methodName = 'date';
break;
case ColumnType.DATE_TIME:
methodName = 'dateTime';
break;
case ColumnType.TIME_STAMP:
methodName = 'timeStamp';
break;
default:
ExpressionBuilder provColumn;
var colType = new TypeBuilder('Column');
var columnTypeType = new TypeBuilder('ColumnType');
if (col.length == null) {
methodName = 'declare';
provColumn = columnTypeType.newInstance([
literal(col.type.name),
]);
} else {
methodName = 'declareColumn';
provColumn = colType.newInstance([], named: {
'type': columnTypeType.newInstance([
literal(col.type.name),
]),
'length': literal(col.length),
});
}
positional.add(provColumn);
break;
}
}
var field = table.invoke(methodName, positional, namedArguments: named);
var cascade = <ExpressionBuilder Function(ExpressionBuilder)>[];
if (col.defaultValue != null) {
cascade.add((e) => e.invoke('defaultsTo', [literal(col.defaultValue)]));
}
if (col.index == IndexType.PRIMARY_KEY ||
(autoIdAndDateFields != false && name == 'id'))
cascade.add((e) => e.invoke('primaryKey', []));
else if (col.index == IndexType.UNIQUE)
cascade.add((e) => e.invoke('unique', []));
if (col.nullable != true) cascade.add((e) => e.invoke('notNull', []));
field = cascade.isEmpty
? field
: field.cascade((e) => cascade.map((f) => f(e)).toList());
closure.addStatement(field);
});
// Relations
ctx.relationships.forEach((name, r) {
var relationship = ctx.populateRelationship(name);
if (relationship.isBelongsTo) {
var key = relationship.localKey;
if (dup.contains(key))
return;
else {
dup.add(key);
if (i++ > 0) buf.writeln(',');
}
buf.write(
' "${relationship.localKey}" int REFERENCES ${relationship.foreignTable}(${relationship.foreignKey})');
var field = table.invoke('integer', [literal(key)]);
// .references('user', 'id').onDeleteCascade()
var ref = field.invoke('references', [
literal(relationship.foreignTable),
literal(relationship.foreignKey),
]);
if (relationship.cascadeOnDelete != false && relationship.isSingular)
buf.write(' ON DELETE CASCADE');
ref = ref.invoke('onDeleteCascade', []);
return closure.addStatement(ref);
}
});
// Primary keys, unique
bool hasPrimary = false;
ctx.fields.forEach((f) {
var col = ctx.columnInfo[f.name];
if (col != null) {
var name = ctx.resolveFieldName(f.name);
if (col.index == IndexType.UNIQUE) {
if (i++ > 0) buf.writeln(',');
buf.write(' UNIQUE($name)');
} else if (col.index == IndexType.PRIMARY_KEY) {
if (i++ > 0) buf.writeln(',');
hasPrimary = true;
buf.write(' PRIMARY KEY($name)');
}
}
});
if (!hasPrimary) {
var idField =
ctx.fields.firstWhere((f) => f.name == 'id', orElse: () => null);
if (idField != null) {
if (i++ > 0) buf.writeln(',');
buf.write(' PRIMARY KEY(id)');
}
}
buf.writeln();
buf.writeln(');');
meth.addStatement(_schema.invoke('create', [
literal(ctx.tableName),
closure,
]));
return meth..addAnnotation(lib$core.override);
}
void buildDownMigration(PostgresBuildContext ctx, StringBuffer buf) {
buf.writeln('DROP TABLE "${ctx.tableName}";');
MethodBuilder buildDownMigration(PostgresBuildContext ctx) {
return method('down', [
_schemaParam,
_schema.invoke('drop', [literal(ctx.tableName)]),
])
..addAnnotation(lib$core.override);
}
}
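The rewritten generator above no longer emits raw SQL; it produces a Dart class extending Migration for every @ORM model, built from schema.create/schema.drop calls and the table column methods. For the _Author model that appears later in this commit, the generated .migration.g.dart would look roughly like the sketch below (reconstructed from the builder code, not copied from actual generator output):

import 'package:angel_migration/angel_migration.dart';

class AuthorMigration extends Migration {
  @override
  up(Schema schema) {
    schema.create('authors', (table) {
      // autoIdAndDateFields adds id, created_at and updated_at.
      table.serial('id')..primaryKey();
      table.varchar('name', length: 255)
        ..defaultsTo('Tobe Osakwe')
        ..unique();
      table.timeStamp('created_at');
      table.timeStamp('updated_at');
    });
  }

  @override
  down(Schema schema) {
    schema.drop('authors');
  }
}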

View file

@@ -0,0 +1,165 @@
import 'dart:async';
import 'package:analyzer/dart/ast/ast.dart';
import 'package:analyzer/dart/element/element.dart';
import 'package:angel_orm/angel_orm.dart';
import 'package:build/build.dart';
import 'build_context.dart';
import 'package:source_gen/source_gen.dart';
import 'postgres_build_context.dart';
class SqlMigrationBuilder implements Builder {
/// If `true` (default), then field names will automatically be (de)serialized as snake_case.
final bool autoSnakeCaseNames;
/// If `true` (default), then the schema will automatically add id, created_at and updated_at fields.
final bool autoIdAndDateFields;
/// If `true` (default: `false`), then the resulting schema will generate a `TEMPORARY` table.
final bool temporary;
const SqlMigrationBuilder(
{this.autoSnakeCaseNames: true,
this.autoIdAndDateFields: true,
this.temporary: false});
@override
Map<String, List<String>> get buildExtensions => {
'.dart': ['.up.g.sql', '.down.g.sql']
};
@override
Future build(BuildStep buildStep) async {
var resolver = await buildStep.resolver;
var up = new StringBuffer();
var down = new StringBuffer();
if (!await resolver.isLibrary(buildStep.inputId)) {
return;
}
var lib = await resolver.libraryFor(buildStep.inputId);
var elements = lib.definingCompilationUnit.unit.declarations;
if (!elements.any(
(el) => ormTypeChecker.firstAnnotationOf(el.element) != null)) return;
await generateSqlMigrations(lib, resolver, buildStep, up, down);
buildStep.writeAsString(
buildStep.inputId.changeExtension('.up.g.sql'), up.toString());
buildStep.writeAsString(
buildStep.inputId.changeExtension('.down.g.sql'), down.toString());
}
Future generateSqlMigrations(LibraryElement libraryElement, Resolver resolver,
BuildStep buildStep, StringBuffer up, StringBuffer down) async {
List<String> done = [];
for (var element
in libraryElement.definingCompilationUnit.unit.declarations) {
if (element is ClassDeclaration && !done.contains(element.name)) {
var ann = ormTypeChecker.firstAnnotationOf(element.element);
if (ann != null) {
var ctx = await buildContext(
element.element,
reviveOrm(new ConstantReader(ann)),
buildStep,
resolver,
autoSnakeCaseNames != false,
autoIdAndDateFields != false);
buildUpMigration(ctx, up);
buildDownMigration(ctx, down);
done.add(element.name.name);
}
}
}
}
void buildUpMigration(PostgresBuildContext ctx, StringBuffer buf) {
if (temporary == true)
buf.writeln('CREATE TEMPORARY TABLE "${ctx.tableName}" (');
else
buf.writeln('CREATE TABLE "${ctx.tableName}" (');
List<String> dup = [];
int i = 0;
ctx.columnInfo.forEach((name, col) {
var key = ctx.resolveFieldName(name);
if (dup.contains(key))
return;
else {
if (key != 'id' || autoIdAndDateFields == false) {
// Check for relationships that might duplicate
for (var rName in ctx.relationships.keys) {
var relationship = ctx.populateRelationship(rName);
if (relationship.localKey == key) return;
}
}
dup.add(key);
if (i++ > 0) buf.writeln(',');
}
buf.write(' "$key" ${col.type.name}');
if (col.index == IndexType.PRIMARY_KEY)
buf.write(' PRIMARY KEY');
else if (col.index == IndexType.UNIQUE) buf.write(' UNIQUE');
if (col.nullable != true) buf.write(' NOT NULLABLE');
});
// Relations
ctx.relationships.forEach((name, r) {
var relationship = ctx.populateRelationship(name);
if (relationship.isBelongsTo) {
var key = relationship.localKey;
if (dup.contains(key))
return;
else {
dup.add(key);
if (i++ > 0) buf.writeln(',');
}
buf.write(
' "${relationship.localKey}" int REFERENCES ${relationship.foreignTable}(${relationship.foreignKey})');
if (relationship.cascadeOnDelete != false && relationship.isSingular)
buf.write(' ON DELETE CASCADE');
}
});
// Primary keys, unique
bool hasPrimary = false;
ctx.fields.forEach((f) {
var col = ctx.columnInfo[f.name];
if (col != null) {
var name = ctx.resolveFieldName(f.name);
if (col.index == IndexType.UNIQUE) {
if (i++ > 0) buf.writeln(',');
buf.write(' UNIQUE($name)');
} else if (col.index == IndexType.PRIMARY_KEY) {
if (i++ > 0) buf.writeln(',');
hasPrimary = true;
buf.write(' PRIMARY KEY($name)');
}
}
});
if (!hasPrimary) {
var idField =
ctx.fields.firstWhere((f) => f.name == 'id', orElse: () => null);
if (idField != null) {
if (i++ > 0) buf.writeln(',');
buf.write(' PRIMARY KEY(id)');
}
}
buf.writeln();
buf.writeln(');');
}
void buildDownMigration(PostgresBuildContext ctx, StringBuffer buf) {
buf.writeln('DROP TABLE "${ctx.tableName}";');
}
}

View file

@@ -1,5 +1,5 @@
name: angel_orm_generator
version: 1.0.0-alpha+4
version: 1.0.0-alpha+5
description: Code generators for Angel's ORM.
author: Tobe O <thosakwe@gmail.com>
homepage: https://github.com/angel-dart/orm
@@ -15,6 +15,7 @@ dependencies:
dev_dependencies:
angel_diagnostics: ^1.0.0
angel_framework: ^1.0.0
angel_migration: ^1.0.0-alpha
angel_test: ^1.0.0
build_runner: ^0.5.0
test: ^0.12.0

View file

@@ -8,5 +8,6 @@ part 'author.g.dart';
@serializable
@orm
class _Author extends Model {
@Column(length: 255, index: IndexType.UNIQUE, defaultValue: 'Tobe Osakwe')
String name;
}
}

View file

@@ -120,7 +120,7 @@ class AuthorQuery {
}
Stream<Author> update(PostgreSQLConnection connection,
{String name, DateTime createdAt, DateTime updatedAt}) {
{String name: 'Tobe Osakwe', DateTime createdAt, DateTime updatedAt}) {
var buf = new StringBuffer(
'UPDATE "authors" SET ("name", "created_at", "updated_at") = (@name, @createdAt, @updatedAt) ');
var whereClause = where.toWhereClause();
@@ -171,7 +171,9 @@ class AuthorQuery {
}
static Future<Author> insert(PostgreSQLConnection connection,
{String name, DateTime createdAt, DateTime updatedAt}) async {
{String name: 'Tobe Osakwe',
DateTime createdAt,
DateTime updatedAt}) async {
var __ormNow__ = new DateTime.now();
var result = await connection.query(
'INSERT INTO "authors" ("name", "created_at", "updated_at") VALUES (@name, @createdAt, @updatedAt) RETURNING "id", "name", "created_at", "updated_at";',

View file

@@ -1,7 +1,8 @@
CREATE TEMPORARY TABLE "authors" (
"id" serial,
"name" varchar,
"id" varchar,
"name" varchar UNIQUE,
"created_at" timestamp,
"updated_at" timestamp,
UNIQUE(name),
PRIMARY KEY(id)
);

View file

@@ -11,6 +11,7 @@ part 'book.g.dart';
class _Book extends Model {
@belongsTo
Author author;
int authorId;
String name;
}

View file

@@ -1,5 +1,5 @@
CREATE TEMPORARY TABLE "books" (
"id" serial,
"id" varchar,
"name" varchar,
"created_at" timestamp,
"updated_at" timestamp,

View file

@@ -1,5 +1,5 @@
CREATE TEMPORARY TABLE "cars" (
"id" serial,
"id" varchar,
"make" varchar,
"description" varchar,
"family_friendly" boolean,

View file

@@ -1,5 +1,5 @@
CREATE TEMPORARY TABLE "foots" (
"id" serial,
"id" varchar,
"leg_id" int,
"n_toes" int,
"created_at" timestamp,

View file

@@ -1,5 +1,5 @@
CREATE TEMPORARY TABLE "fruits" (
"id" serial,
"id" varchar,
"tree_id" int,
"common_name" varchar,
"created_at" timestamp,

View file

@@ -1,5 +1,5 @@
CREATE TEMPORARY TABLE "legs" (
"id" serial,
"id" varchar,
"name" varchar,
"created_at" timestamp,
"updated_at" timestamp,

View file

@@ -1,5 +1,5 @@
CREATE TEMPORARY TABLE "roles" (
"id" serial,
"id" varchar,
"name" varchar,
"created_at" timestamp,
"updated_at" timestamp,

View file

@@ -1,6 +1,6 @@
CREATE TEMPORARY TABLE "trees" (
"id" serial,
"rings" smallint UNIQUE,
"id" varchar,
"rings" int UNIQUE,
"created_at" timestamp,
"updated_at" timestamp,
UNIQUE(rings),

View file

@@ -1,5 +1,5 @@
CREATE TEMPORARY TABLE "users" (
"id" serial,
"id" varchar,
"username" varchar,
"password" varchar,
"email" varchar,

View file

@@ -54,6 +54,14 @@ final List<BuildAction> actions = [
packageName,
inputs: allModels,
),
new BuildAction(
new LibraryBuilder(
const MigrationGenerator(),
generatedExtension: '.migration.g.dart',
),
packageName,
inputs: allModels,
),
new BuildAction(
const SqlMigrationBuilder(
temporary: true,