Generate SQL migrations
parent ba2afae0f4
commit a3eb1b8a32

21 changed files with 674 additions and 176 deletions
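For orientation (this sketch is not part of the commit): the new SQLMigrationGenerator writes a `.up.g.sql`/`.down.g.sql` pair next to every library containing an ORM-annotated model, and PostgresORMGenerator now emits a query class per model (see the generated CarQuery further down in this diff). A minimal usage sketch of that generated API, assuming a locally running Postgres instance; host, port, and database name are placeholders, and note that the generated toSql() is still a stub in this commit, so the streaming get() path is not functional yet:

import 'package:postgres/postgres.dart';

// Mirrors the imports used by the test suite below.
import 'models/car.dart';
import 'models/car.orm.g.dart';

main() async {
  // Placeholder connection settings (assumptions, not from the commit).
  var connection = new PostgreSQLConnection('127.0.0.1', 5432, 'orm_test');
  await connection.open();

  // Fetch a single row by id via the generated CarQuery.getOne().
  Car car = await new CarQuery().getOne(1, connection);
  print(car.toJson());

  // Stream every row; the generated static CarQuery.getAll() delegates to get(),
  // which still depends on the unimplemented toSql().
  await for (var c in CarQuery.getAll(connection)) {
    print(c.make);
  }

  await connection.close();
}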
@@ -2,7 +2,7 @@
 <project version="4">
   <component name="ProjectModuleManager">
     <modules>
-      <module fileurl="file://$PROJECT_DIR$/.idea/postgres.iml" filepath="$PROJECT_DIR$/.idea/postgres.iml" />
+      <module fileurl="file://$PROJECT_DIR$/.idea/orm.iml" filepath="$PROJECT_DIR$/.idea/orm.iml" />
     </modules>
   </component>
 </project>
@@ -12,14 +12,6 @@
       <excludeFolder url="file://$MODULE_DIR$/tmp" />
       <excludeFolder url="file://$MODULE_DIR$/tool/packages" />
     </content>
-    <content url="file://$MODULE_DIR$/../serialize">
-      <excludeFolder url="file://$MODULE_DIR$/../serialize/.pub" />
-      <excludeFolder url="file://$MODULE_DIR$/../serialize/build" />
-      <excludeFolder url="file://$MODULE_DIR$/../serialize/packages" />
-      <excludeFolder url="file://$MODULE_DIR$/../serialize/test/models/packages" />
-      <excludeFolder url="file://$MODULE_DIR$/../serialize/test/packages" />
-      <excludeFolder url="file://$MODULE_DIR$/../serialize/tool/packages" />
-    </content>
     <orderEntry type="inheritedJdk" />
     <orderEntry type="sourceFolder" forTests="false" />
     <orderEntry type="library" name="Dart SDK" level="project" />
@@ -1,7 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="serialize::build.dart" type="DartCommandLineRunConfigurationType" factoryName="Dart Command Line Application" singleton="true">
-    <option name="filePath" value="$PROJECT_DIR$/../serialize/tool/build.dart" />
-    <option name="workingDirectory" value="$PROJECT_DIR$/../serialize" />
-    <method />
-  </configuration>
-</component>
@@ -1 +1,2 @@
+export 'src/builder/postgres/migration.dart';
 export 'src/builder/postgres/postgres.dart';
@@ -1,8 +0,0 @@
-import 'package:analyzer/dart/element/element.dart';
-import 'package:source_gen/src/annotation.dart';
-
-T findAnnotation<T>(FieldElement field, Type outType) {
-  var first = field.metadata
-      .firstWhere((ann) => matchAnnotation(outType, ann), orElse: () => null);
-  return first == null ? null : instantiateAnnotation(first);
-}
@@ -1,80 +1,87 @@
 import 'package:analyzer/dart/element/element.dart';
-import 'package:angel_serialize/angel_serialize.dart';
+import 'package:angel_serialize/build_context.dart' as serialize;
+import 'package:angel_serialize/context.dart' as serialize;
 import 'package:build/build.dart';
 import 'package:inflection/inflection.dart';
-import 'package:path/path.dart' as p;
 import 'package:recase/recase.dart';
 import '../../annotations.dart';
 import '../../migration.dart';
 import '../../relations.dart';
-import '../find_annotation.dart';
+import 'package:angel_serialize/src/find_annotation.dart';
+import 'package:source_gen/src/annotation.dart';
 import 'postgres_build_context.dart';
 
 // TODO: Should add id, createdAt, updatedAt...
-PostgresBuildContext buildContext(ClassElement clazz, ORM annotation,
-    BuildStep buildStep, bool autoSnakeCaseNames) {
-  var ctx = new PostgresBuildContext(annotation,
-      originalClassName: clazz.name,
+PostgresBuildContext buildContext(
+    ClassElement clazz,
+    ORM annotation,
+    BuildStep buildStep,
+    Resolver resolver,
+    bool autoSnakeCaseNames,
+    bool autoIdAndDateFields) {
+  var raw = serialize.buildContext(clazz, null, buildStep, resolver,
+      autoSnakeCaseNames != false, autoIdAndDateFields != false);
+  var ctx = new PostgresBuildContext(raw, annotation, resolver, buildStep,
       tableName: annotation.tableName?.isNotEmpty == true
          ? annotation.tableName
-          : pluralize(new ReCase(clazz.name).snakeCase),
-      sourceFilename: p.basename(buildStep.inputId.path));
+          : pluralize(new ReCase(clazz.name).snakeCase));
+  List<String> fieldNames = [];
 
-  for (var field in clazz.fields) {
-    if (field.getter != null && field.setter != null) {
+  for (var field in raw.fields) {
+    fieldNames.add(field.name);
     // Check for relationship. If so, skip.
-      Relationship relationship = findAnnotation<HasOne>(field, HasOne) ??
-          findAnnotation<HasMany>(field, HasMany) ??
-          findAnnotation<BelongsTo>(field, BelongsTo);
+    Relationship relationship = null;
+    /* findAnnotation<HasOne>(field, HasOne) ??
+        findAnnotation<HasMany>(field, HasMany) ??
+        findAnnotation<BelongsTo>(field, BelongsTo);*/
+    bool isRelationship = field.metadata.any((ann) {
+      return matchAnnotation(Relationship, ann) ||
+          matchAnnotation(HasMany, ann) ||
+          matchAnnotation(HasOne, ann) ||
+          matchAnnotation(BelongsTo, ann);
+    });
 
-      if (relationship != null) {
-        ctx.relationships[field.name] = relationship;
-        continue;
-      } else print('Hm: ${field.name}');
-
-      // Check for alias
-      var alias = findAnnotation<Alias>(field, Alias);
-
-      if (alias?.name?.isNotEmpty == true) {
-        ctx.aliases[field.name] = alias.name;
-      } else if (autoSnakeCaseNames != false) {
-        ctx.aliases[field.name] = new ReCase(field.name).snakeCase;
-      }
-
-      // Check for column annotation...
-      var column = findAnnotation<Column>(field, Column);
-
-      if (column == null) {
-        // Guess what kind of column this is...
-        switch (field.type.name) {
-          case 'String':
-            column = const Column(type: ColumnType.VAR_CHAR);
-            break;
-          case 'int':
-            column = const Column(type: ColumnType.INT);
-            break;
-          case 'double':
-            column = const Column(type: ColumnType.DECIMAL);
-            break;
-          case 'num':
-            column = const Column(type: ColumnType.NUMERIC);
-            break;
-          case 'num':
-            column = const Column(type: ColumnType.NUMERIC);
-            break;
-          case 'bool':
-            column = const Column(type: ColumnType.BIT);
-            break;
-          case 'DateTime':
-            column = const Column(type: ColumnType.DATE_TIME);
-            break;
-        }
-      }
-
-      if (column == null)
-        throw 'Cannot infer SQL column type for field "${field.name}" with type "${field.type.name}".';
-      ctx.columnInfo[field.name] = column;
-      ctx.fields.add(field);
+    if (relationship != null) {
+      ctx.relationships[field.name] = relationship;
+      continue;
+    } else if (isRelationship) {
+      ctx.relationships[field.name] = null;
+      continue;
     }
+
+    // Check for column annotation...
+    var column = findAnnotation<Column>(field, Column);
+
+    if (column == null) {
+      // Guess what kind of column this is...
+      switch (field.type.name) {
+        case 'String':
+          column = const Column(type: ColumnType.VAR_CHAR);
+          break;
+        case 'int':
+          column = const Column(type: ColumnType.INT);
+          break;
+        case 'double':
+          column = const Column(type: ColumnType.DECIMAL);
+          break;
+        case 'num':
+          column = const Column(type: ColumnType.NUMERIC);
+          break;
+        case 'num':
+          column = const Column(type: ColumnType.NUMERIC);
+          break;
+        case 'bool':
+          column = const Column(type: ColumnType.BIT);
+          break;
+        case 'DateTime':
+          column = const Column(type: ColumnType.TIME_STAMP);
+          break;
+      }
+    }
+
+    if (column == null)
+      throw 'Cannot infer SQL column type for field "${field.name}" with type "${field.type.name}".';
+    ctx.columnInfo[field.name] = column;
   }
 
   return ctx;
 }
lib/src/builder/postgres/migration.dart (new file, 105 lines)
@@ -0,0 +1,105 @@
+import 'dart:async';
+import 'package:analyzer/dart/element/element.dart';
+import 'package:angel_serialize/angel_serialize.dart';
+import 'package:build/build.dart';
+import 'package:code_builder/dart/async.dart';
+import 'package:code_builder/dart/core.dart';
+import 'package:code_builder/code_builder.dart';
+import 'package:inflection/inflection.dart';
+import 'package:path/path.dart' as p;
+import 'package:recase/recase.dart';
+import 'package:source_gen/src/annotation.dart';
+import 'package:source_gen/src/utils.dart';
+import 'package:source_gen/source_gen.dart';
+import '../../annotations.dart';
+import '../../migration.dart';
+import 'package:angel_serialize/src/find_annotation.dart';
+import 'build_context.dart';
+import 'postgres_build_context.dart';
+
+// TODO: HasOne, HasMany, BelongsTo
+class SQLMigrationGenerator implements Builder {
+  /// If "true" (default), then field names will automatically be (de)serialized as snake_case.
+  final bool autoSnakeCaseNames;
+
+  /// If "true" (default), then
+  final bool autoIdAndDateFields;
+
+  const SQLMigrationGenerator(
+      {this.autoSnakeCaseNames: true, this.autoIdAndDateFields: true});
+
+  @override
+  Map<String, List<String>> get buildExtensions => {
+        '.dart': ['.up.g.sql', '.down.g.sql']
+      };
+
+  @override
+  Future build(BuildStep buildStep) async {
+    var resolver = await buildStep.resolver;
+    var up = new StringBuffer();
+    var down = new StringBuffer();
+
+    if (!await resolver.isLibrary(buildStep.inputId)) {
+      return;
+    }
+
+    var lib = await resolver.getLibrary(buildStep.inputId);
+    var elements = getElementsFromLibraryElement(lib);
+
+    if (!elements.any(
+        (el) => el.metadata.any((ann) => matchAnnotation(ORM, ann)))) return;
+
+    generateSqlMigrations(lib, resolver, buildStep, up, down);
+    buildStep.writeAsString(
+        buildStep.inputId.changeExtension('.up.g.sql'), up.toString());
+    buildStep.writeAsString(
+        buildStep.inputId.changeExtension('.down.g.sql'), down.toString());
+  }
+
+  void generateSqlMigrations(LibraryElement libraryElement, Resolver resolver,
+      BuildStep buildStep, StringBuffer up, StringBuffer down) {
+    List<String> done = [];
+    for (var element in getElementsFromLibraryElement(libraryElement)) {
+      if (element is ClassElement && !done.contains(element.name)) {
+        var ann = element.metadata
+            .firstWhere((a) => matchAnnotation(ORM, a), orElse: () => null);
+        if (ann != null) {
+          var ctx = buildContext(
+              element,
+              instantiateAnnotation(ann),
+              buildStep,
+              resolver,
+              autoSnakeCaseNames != false,
+              autoIdAndDateFields != false);
+          buildUpMigration(ctx, up);
+          buildDownMigration(ctx, down);
+          done.add(element.name);
+        }
+      }
+    }
+  }
+
+  void buildUpMigration(PostgresBuildContext ctx, StringBuffer buf) {
+    buf.writeln('CREATE TABLE "${ctx.tableName}" (');
+
+    int i = 0;
+    ctx.columnInfo.forEach((name, col) {
+      if (i++ > 0) buf.writeln(',');
+      var key = ctx.resolveFieldName(name);
+      buf.write('  "$key" ${col.type.name}');
+
+      if (col.index == IndexType.PRIMARY_KEY)
+        buf.write(' PRIMARY KEY');
+      else if (col.index == IndexType.UNIQUE) buf.write(' UNIQUE');
+
+      if (col.nullable != true) buf.write(' NOT NULLABLE');
+    });
+
+    buf.writeln();
+    buf.writeln(');');
+  }
+
+  void buildDownMigration(PostgresBuildContext ctx, StringBuffer buf) {
+    buf.writeln('DROP TABLE "${ctx.tableName}";');
+  }
+}
@@ -8,10 +8,12 @@ import 'package:code_builder/code_builder.dart';
 import 'package:inflection/inflection.dart';
 import 'package:path/path.dart' as p;
 import 'package:recase/recase.dart';
+import 'package:source_gen/src/annotation.dart';
+import 'package:source_gen/src/utils.dart';
 import 'package:source_gen/source_gen.dart';
 import '../../annotations.dart';
 import '../../migration.dart';
-import '../find_annotation.dart';
+import 'package:angel_serialize/src/find_annotation.dart';
 import 'build_context.dart';
 import 'postgres_build_context.dart';
 
@@ -20,33 +22,59 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
   /// If `true` (default), then field names will automatically be (de)serialized as snake_case.
   final bool autoSnakeCaseNames;
 
-  const PostgresORMGenerator({this.autoSnakeCaseNames: true});
+  /// If `true` (default), then
+  final bool autoIdAndDateFields;
+
+  const PostgresORMGenerator(
+      {this.autoSnakeCaseNames: true, this.autoIdAndDateFields: true});
 
   @override
   Future<String> generateForAnnotatedElement(
-      Element element, ORM annotation, BuildStep buildStep) {
+      Element element, ORM annotation, BuildStep buildStep) async {
     if (element is! ClassElement)
-      throw 'Only classes can be annotated with @model.';
-    var context =
-        buildContext(element, annotation, buildStep, autoSnakeCaseNames);
-    return new Future<String>.value(
-        prettyToSource(generateOrmLibrary(context).buildAst()));
+      throw 'Only classes can be annotated with @serializable.';
+    var resolver = await buildStep.resolver;
+    return prettyToSource(
+        generateOrmLibrary(element.library, resolver, buildStep).buildAst());
   }
 
-  LibraryBuilder generateOrmLibrary(PostgresBuildContext ctx) {
+  LibraryBuilder generateOrmLibrary(
+      LibraryElement libraryElement, Resolver resolver, BuildStep buildStep) {
     var lib = new LibraryBuilder();
     lib.addDirective(new ImportBuilder('dart:async'));
     lib.addDirective(new ImportBuilder('package:angel_orm/angel_orm.dart'));
     lib.addDirective(new ImportBuilder('package:postgres/postgres.dart'));
-    lib.addDirective(new ImportBuilder(ctx.sourceFilename));
-    lib.addMember(buildQueryClass(ctx));
-    lib.addMember(buildWhereClass(ctx));
+    lib.addDirective(new ImportBuilder(p.basename(buildStep.inputId.path)));
+    List<String> done = [];
+    for (var element in getElementsFromLibraryElement(libraryElement)) {
+      if (element is ClassElement && !done.contains(element.name)) {
+        var ann = element.metadata
+            .firstWhere((a) => matchAnnotation(ORM, a), orElse: () => null);
+        if (ann != null) {
+          var ctx = buildContext(
+              element,
+              instantiateAnnotation(ann),
+              buildStep,
+              resolver,
+              autoSnakeCaseNames != false,
+              autoIdAndDateFields != false);
+          lib.addMember(buildQueryClass(ctx));
+          lib.addMember(buildWhereClass(ctx));
+          done.add(element.name);
+        }
+      }
+    }
     return lib;
   }
 
   ClassBuilder buildQueryClass(PostgresBuildContext ctx) {
     var clazz = new ClassBuilder(ctx.queryClassName);
+
+    // Add constructor + field
+    var PostgreSQLConnection = new TypeBuilder('PostgreSQLConnection');
+    var connection = reference('connection');
+
     // Add or + not
     for (var relation in ['and', 'or', 'not']) {
       clazz.addField(varFinal('_$relation',
@@ -73,6 +101,12 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
         type: new TypeBuilder(ctx.whereClassName),
         value: new TypeBuilder(ctx.whereClassName).newInstance([])));
 
+    // Add toSql()...
+    clazz.addMethod(buildToSqlMethod(ctx));
+
+    // Add parseRow()...
+    clazz.addMethod(buildParseRowMethod(ctx), asStatic: true);
+
     // Add get()...
     clazz.addMethod(buildGetMethod(ctx));
 
@@ -94,16 +128,77 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
             returnType: new TypeBuilder('Stream',
                 genericTypes: [new TypeBuilder(ctx.modelClassName)]),
             returns: new TypeBuilder(ctx.queryClassName)
-                .newInstance([]).invoke('get', [])),
+                .newInstance([]).invoke('get', [connection]))
+          ..addPositional(parameter('connection', [PostgreSQLConnection])),
         asStatic: true);
 
     return clazz;
   }
 
+  MethodBuilder buildToSqlMethod(PostgresBuildContext ctx) {
+    // TODO: Bake relations into SQL queries
+    var meth = new MethodBuilder('toSql', returnType: lib$core.String);
+    return meth;
+  }
+
+  MethodBuilder buildParseRowMethod(PostgresBuildContext ctx) {
+    var meth = new MethodBuilder('parseRow',
+        returnType: new TypeBuilder(ctx.modelClassName));
+    meth.addPositional(parameter('row', [lib$core.List]));
+    var row = reference('row');
+    var DATE_YMD_HMS = reference('DATE_YMD_HMS');
+
+    // We want to create a Map using the SQL row.
+    Map<String, ExpressionBuilder> data = {};
+
+    int i = 0;
+
+    // TODO: Support relations...
+    ctx.fields.forEach((field) {
+      var name = ctx.resolveFieldName(field.name);
+      var rowKey = row[literal(i++)];
+
+      if (field.type.name == 'DateTime') {
+        // TODO: Handle DATE and not just DATETIME
+        data[name] = DATE_YMD_HMS.invoke('parse', [rowKey]);
+      } else if (field.name == 'id' && ctx.shimmed.containsKey('id')) {
+        data[name] = rowKey.invoke('toString', []);
+      } else if (field.type.isAssignableTo(ctx.typeProvider.boolType)) {
+        data[name] = rowKey.equals(literal(1));
+      } else
+        data[name] = rowKey;
+    });
+
+    // Then, call a .fromJson() constructor
+    meth.addStatement(new TypeBuilder(ctx.modelClassName)
+        .newInstance([map(data)], constructor: 'fromJson').asReturn());
+
+    return meth;
+  }
+
   MethodBuilder buildGetMethod(PostgresBuildContext ctx) {
     var meth = new MethodBuilder('get',
         returnType: new TypeBuilder('Stream',
             genericTypes: [new TypeBuilder(ctx.modelClassName)]));
+    meth.addPositional(
+        parameter('connection', [new TypeBuilder('PostgreSQLConnection')]));
+    var streamController = new TypeBuilder('StreamController',
+        genericTypes: [new TypeBuilder(ctx.modelClassName)]);
+    var ctrl = reference('ctrl'), connection = reference('connection');
+    meth.addStatement(varField('ctrl',
+        type: streamController, value: streamController.newInstance([])));
+
+    // Invoke query...
+    var future = connection.invoke('query', [reference('toSql').call([])]);
+    var catchError = ctrl.property('addError');
+    var then = new MethodBuilder.closure()..addPositional(parameter('rows'));
+    then.addStatement(reference('rows')
+        .invoke('map', [reference('parseRow')]).invoke(
+            'forEach', [ctrl.property('add')]));
+    then.addStatement(ctrl.invoke('close', []));
+    meth.addStatement(
+        future.invoke('then', [then]).invoke('catchError', [catchError]));
+    meth.addStatement(ctrl.property('stream').asReturn());
     return meth;
   }
 
@@ -111,6 +206,19 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
     var meth = new MethodBuilder('getOne',
         returnType: new TypeBuilder('Future',
             genericTypes: [new TypeBuilder(ctx.modelClassName)]));
+    meth.addPositional(parameter('id', [lib$core.int]));
+    meth.addPositional(
+        parameter('connection', [new TypeBuilder('PostgreSQLConnection')]));
+    meth.addStatement(reference('connection').invoke('query', [
+      literal('SELECT * FROM `${ctx.tableName}` WHERE `id` = @id;')
+    ], namedArguments: {
+      'substitutionValues': map({'id': reference('id')})
+    }).invoke('then', [
+      new MethodBuilder.closure(
+          returns:
+              reference('parseRow').call([reference('rows').property('first')]))
+        ..addPositional(parameter('rows'))
+    ]).asReturn());
     return meth;
   }
 
@@ -129,7 +237,9 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
   }
 
   MethodBuilder buildInsertMethod(PostgresBuildContext ctx) {
+    // TODO: Auto-set createdAt, updatedAt...
     var meth = new MethodBuilder('insert',
+        modifier: MethodModifier.asAsync,
         returnType: new TypeBuilder('Future',
             genericTypes: [new TypeBuilder(ctx.modelClassName)]));
     meth.addPositional(
@@ -145,6 +255,96 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
       meth.addNamed(p);
     });
 
+    var buf = new StringBuffer('INSERT INTO `${ctx.tableName}` (');
+    for (int i = 0; i < ctx.fields.length; i++) {
+      if (i > 0) buf.write(', ');
+      var key = ctx.resolveFieldName(ctx.fields[i].name);
+      buf.write('`$key`');
+    }
+
+    buf.write(' VALUES (');
+    for (int i = 0; i < ctx.fields.length; i++) {
+      if (i > 0) buf.write(', ');
+      buf.write('@${ctx.fields[i].name}');
+    }
+
+    buf.write(');');
+
+    Map<String, ExpressionBuilder> substitutionValues = {};
+    ctx.fields.forEach((field) {
+      substitutionValues[field.name] = reference(field.name);
+    });
+
+    /*
+    // Create StringBuffer
+    meth.addStatement(varField('buf',
+        type: lib$core.StringBuffer,
+        value: lib$core.StringBuffer.newInstance([])));
+    var buf = reference('buf');
+
+    // Create "INSERT INTO segment"
+    var fieldNames = ctx.fields
+        .map((f) => ctx.resolveFieldName(f.name))
+        .map((k) => '`$k`')
+        .join(', ');
+    var insertInto =
+        literal('INSERT INTO `${ctx.tableName}` ($fieldNames) VALUES (');
+    meth.addStatement(buf.invoke('write', [insertInto]));
+
+    // Write all fields
+    int i = 0;
+    var backtick = literal('`');
+    var numType = ctx.typeProvider.numType;
+    var boolType = ctx.typeProvider.boolType;
+    ctx.fields.forEach((field) {
+      var ref = reference(field.name);
+      ExpressionBuilder value;
+
+      // Handle numbers
+      if (field.type.isAssignableTo(numType)) {
+        value = ref;
+      }
+
+      // Handle boolean
+      else if (field.type.isAssignableTo(numType)) {
+        value = ref.equals(literal(true)).ternary(literal(1), literal(0));
+      }
+
+      // Handle DateTime
+      else if (field.type.isAssignableTo(ctx.dateTimeType)) {
+        // TODO: DATE and not just DATETIME
+        value = reference('DATE_YMD_HMS').invoke('format', [ref]);
+      }
+
+      // Handle anything else...
+      // TODO: Escape SQL strings???
+      else {
+        value = backtick + (ref.invoke('toString', [])) + backtick;
+      }
+
+      if (i++ > 0) meth.addStatement(buf.invoke('write', [literal(', ')]));
+      meth.addStatement(ifThen(ref.equals(literal(null)), [
+        buf.invoke('write', [literal('NULL')]),
+        elseThen([
+          buf.invoke('write', [value])
+        ])
+      ]));
+    });
+
+    // Finalize buffer
+    meth.addStatement(buf.invoke('write', [literal(');')]));
+    meth.addStatement(varField('query', value: buf.invoke('toString', [])));*/
+
+    var connection = reference('connection');
+    var query = literal(buf.toString());
+    var result = reference('result');
+    meth.addStatement(varField('result',
+        value: connection.invoke('query', [
+          query
+        ], namedArguments: {
+          'substitutionValues': map(substitutionValues)
+        }).asAwait()));
+    meth.addStatement(reference('parseRow').call([result]).asReturn());
     return meth;
   }
 
@@ -1,29 +1,56 @@
 import 'package:analyzer/dart/element/element.dart';
+import 'package:analyzer/dart/element/type.dart';
+import 'package:analyzer/src/generated/resolver.dart';
+import 'package:build/build.dart';
+import 'package:angel_serialize/context.dart';
 import '../../annotations.dart';
 import '../../migration.dart';
 import '../../relations.dart';
 
-class PostgresBuildContext {
-  final Map<String, String> aliases = {};
+class PostgresBuildContext extends BuildContext {
+  DartType _dateTimeTypeCache;
+  LibraryElement _libraryCache;
+  TypeProvider _typeProviderCache;
   final Map<String, Column> columnInfo = {};
   final Map<String, IndexType> indices = {};
   final Map<String, Relationship> relationships = {};
-  final String originalClassName, tableName, sourceFilename;
-  final ORM annotation;
-  // Todo: We can use analyzer to copy straight from Model class
-  final List<FieldElement> fields = [];
+  final String tableName;
+  final ORM ormAnnotation;
+  final BuildContext raw;
+  final Resolver resolver;
+  final BuildStep buildStep;
   String primaryKeyName = 'id';
 
-  PostgresBuildContext(this.annotation,
-      {this.originalClassName, this.tableName, this.sourceFilename});
+  PostgresBuildContext(
+      this.raw, this.ormAnnotation, this.resolver, this.buildStep,
+      {this.tableName})
+      : super(raw.annotation,
+            originalClassName: raw.originalClassName,
+            sourceFilename: raw.sourceFilename);
 
-  String get modelClassName => originalClassName.startsWith('_')
-      ? originalClassName.substring(1)
-      : originalClassName;
+  List<FieldElement> get fields => raw.fields;
+
+  Map<String, String> get aliases => raw.aliases;
+
+  Map<String, bool> get shimmed => raw.shimmed;
+
+  String get sourceFilename => raw.sourceFilename;
+
+  String get modelClassName => raw.modelClassName;
+
+  String get originalClassName => raw.originalClassName;
+
   String get queryClassName => modelClassName + 'Query';
   String get whereClassName => queryClassName + 'Where';
 
-  String resolveFieldName(String name) =>
-      aliases.containsKey(name) ? aliases[name] : name;
+  LibraryElement get library =>
+      _libraryCache ??= resolver.getLibrary(buildStep.inputId);
+
+  DartType get dateTimeType => _dateTimeTypeCache ??= (resolver.libraries
+      .firstWhere((lib) => lib.isDartCore)
+      .getType('DateTime')
+      .type);
+
+  TypeProvider get typeProvider =>
+      _typeProviderCache ??= library.context.typeProvider;
 }
@@ -1,5 +1,9 @@
 import 'package:intl/intl.dart';
 
+final DateFormat DATE_YMD = new DateFormat('yyyy-MM-dd');
+final DateFormat DATE_YMD_HMS = new DateFormat('yyyy-MM-dd HH:mm:ss');
+
 abstract class SqlExpressionBuilder {
   bool get hasValue;
   String compile();

@@ -123,8 +127,6 @@ class BooleanSqlExpressionBuilder implements SqlExpressionBuilder {
 }
 
 class DateTimeSqlExpressionBuilder implements SqlExpressionBuilder {
-  static final DateFormat _ymd = new DateFormat('yy-MM-dd');
-  static final DateFormat _ymdHms = new DateFormat('yy-MM-dd HH:mm:ss');
   final NumericSqlExpressionBuilder<int> year =
           new NumericSqlExpressionBuilder<int>(),
       month = new NumericSqlExpressionBuilder<int>(),

@@ -148,7 +150,7 @@ class DateTimeSqlExpressionBuilder implements SqlExpressionBuilder {
           second.hasValue;
 
   bool _change(String _op, DateTime dt, bool time) {
-    var dateString = time ? _ymdHms.format(dt) : _ymd.format(dt);
+    var dateString = time ? DATE_YMD_HMS.format(dt) : DATE_YMD.format(dt);
     _raw = '`$columnName` $_op \'$dateString\'';
     return true;
   }
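A small usage sketch (not part of the commit) of the DateTime builder above, now that it formats through the shared DATE_YMD/DATE_YMD_HMS formats. The builder constructor and method names are taken from the generated CarQueryWhere class and the test further down; the exact output string is an assumption based on _change:

import 'package:angel_orm/angel_orm.dart';

main() {
  // Column name as used by the generated CarQueryWhere below.
  var recalledAt = new DateTimeSqlExpressionBuilder('recalled_at');
  recalledAt.lessThanOrEqualTo(new DateTime.utc(2000, 1, 1), includeTime: false);

  // With DATE_YMD ('yyyy-MM-dd') this should compile to roughly:
  //   `recalled_at` <= '2000-01-01'
  print(recalledAt.compile());
}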
@@ -13,7 +13,7 @@ class Relationship {
 
 class HasMany extends Relationship {
   const HasMany(
-      {String localKey,
+      {String localKey: 'id',
       String foreignKey,
       String foreignTable,
       bool cascadeOnDelete: false})

@@ -28,7 +28,7 @@ const HasMany hasMany = const HasMany();
 
 class HasOne extends Relationship {
   const HasOne(
-      {String localKey,
+      {String localKey: 'id',
       String foreignKey,
       String foreignTable,
       bool cascadeOnDelete: false})

@@ -42,7 +42,8 @@ class HasOne extends Relationship {
 const HasOne hasOne = const HasOne();
 
 class BelongsTo extends Relationship {
-  const BelongsTo({String localKey, String foreignKey, String foreignTable})
+  const BelongsTo(
+      {String localKey: 'id', String foreignKey, String foreignTable})
       : super._(
             localKey: localKey,
             foreignKey: foreignKey,
@@ -21,3 +21,6 @@ dev_dependencies:
   http: ">= 0.11.3 < 0.12.0"
   postgres: ">=0.9.5 <1.0.0"
   test: ">= 0.12.13 < 0.13.0"
+dependency_overrides:
+  source_gen:
+    path: ../../Dart/source_gen
@@ -1,3 +1,6 @@
+import 'dart:io';
+import 'package:angel_orm/angel_orm.dart';
+import 'package:postgres/postgres.dart';
 import 'package:test/test.dart';
 import 'models/car.dart';
 import 'models/car.orm.g.dart';

@@ -5,6 +8,23 @@ import 'models/car.orm.g.dart';
 final DateTime MILENNIUM = new DateTime.utc(2000, 1, 1);
 
 main() {
+  PostgreSQLConnection connection;
+
+  setUp(() async {
+    connection = new PostgreSQLConnection('127.0.0.1', 0, '');
+    await connection.open();
+
+    // Create temp table
+    var query = await new File('test/models/car.sql').readAsString();
+    await connection.execute(query);
+  });
+
+  tearDown(() async {
+    // Drop `cars`
+    await connection.execute('DROP TABLE `cars`;');
+    await connection.close();
+  });
+
   test('to where', () {
     var query = new CarQuery();
     query.where

@@ -12,11 +32,34 @@ main() {
       ..recalledAt.lessThanOrEqualTo(MILENNIUM, includeTime: false);
     var whereClause = query.where.toWhereClause();
     print('Where clause: $whereClause');
-    expect(whereClause, "WHERE `family_friendly` = 1 AND `recalled_at` <= '00-01-01'");
+    expect(whereClause,
+        "WHERE `family_friendly` = 1 AND `recalled_at` <= '00-01-01'");
   });
 
-  test('insert', () async {
-    var car = await CarQuery.insert(null, make: 'Mazda', familyFriendly: false);
+  test('parseRow', () {
+    var row = [
+      0,
+      'Mazda',
+      'CX9',
+      1,
+      DATE_YMD_HMS.format(MILENNIUM),
+      DATE_YMD_HMS.format(MILENNIUM),
+      DATE_YMD_HMS.format(MILENNIUM)
+    ];
+    print(row);
+    var car = CarQuery.parseRow(row);
     print(car.toJson());
-  }, skip: 'Insert not yet implemented');
+    expect(car.id, '0');
+    expect(car.make, 'Mazda');
+    expect(car.description, 'CX9');
+    expect(car.familyFriendly, true);
+    expect(MILENNIUM.toIso8601String(),
+        startsWith(car.recalledAt.toIso8601String()));
+    expect(MILENNIUM.toIso8601String(),
+        startsWith(car.createdAt.toIso8601String()));
+    expect(MILENNIUM.toIso8601String(),
+        startsWith(car.updatedAt.toIso8601String()));
+  });
+
+  test('insert', () async {});
 }
@@ -3,7 +3,6 @@ library angel_orm.test.models.car;
 import 'package:angel_framework/common.dart';
 import 'package:angel_orm/angel_orm.dart';
 import 'package:angel_serialize/angel_serialize.dart';
-import 'tire.dart';
 part 'car.g.dart';
 
 @serializable

@@ -13,6 +12,4 @@ class _Car extends Model {
   String description;
   bool familyFriendly;
   DateTime recalledAt;
-  @hasMany
-  List<Tire> tires;
 }
test/models/car.down.g.sql (new file, 1 line)
@@ -0,0 +1 @@
+DROP TABLE "cars";
@@ -8,6 +8,9 @@ part of angel_orm.test.models.car;
 // **************************************************************************
 
 class Car extends _Car {
+  @override
+  String id;
+
   @override
   String make;
 

@@ -21,34 +24,51 @@ class Car extends _Car {
   DateTime recalledAt;
 
   @override
-  List tires;
+  DateTime createdAt;
+
+  @override
+  DateTime updatedAt;
 
   Car(
-      {this.make,
+      {this.id,
+      this.make,
       this.description,
       this.familyFriendly,
       this.recalledAt,
-      this.tires});
+      this.createdAt,
+      this.updatedAt});
 
   factory Car.fromJson(Map data) {
     return new Car(
+        id: data['id'],
         make: data['make'],
         description: data['description'],
-        familyFriendly: data['familyFriendly'],
-        recalledAt: data['recalledAt'] is DateTime
-            ? data['recalledAt']
-            : (data['recalledAt'] is String
-                ? DateTime.parse(data['recalledAt'])
+        familyFriendly: data['family_friendly'],
+        recalledAt: data['recalled_at'] is DateTime
+            ? data['recalled_at']
+            : (data['recalled_at'] is String
+                ? DateTime.parse(data['recalled_at'])
                 : null),
-        tires: data['tires']);
+        createdAt: data['created_at'] is DateTime
+            ? data['created_at']
+            : (data['created_at'] is String
+                ? DateTime.parse(data['created_at'])
+                : null),
+        updatedAt: data['updated_at'] is DateTime
+            ? data['updated_at']
+            : (data['updated_at'] is String
+                ? DateTime.parse(data['updated_at'])
+                : null));
   }
 
   Map<String, dynamic> toJson() => {
+        'id': id,
         'make': make,
         'description': description,
-        'familyFriendly': familyFriendly,
-        'recalledAt': recalledAt == null ? null : recalledAt.toIso8601String(),
-        'tires': tires
+        'family_friendly': familyFriendly,
+        'recalled_at': recalledAt == null ? null : recalledAt.toIso8601String(),
+        'created_at': createdAt == null ? null : createdAt.toIso8601String(),
+        'updated_at': updatedAt == null ? null : updatedAt.toIso8601String()
       };
 
   static Car parse(Map map) => new Car.fromJson(map);
@@ -5,6 +5,142 @@
 // Target: class _Car
 // **************************************************************************
 
-// Error: type 'SuperConstructorInvocationImpl' is not a subtype of type 'ConstructorFieldInitializer' in type cast where
-// SuperConstructorInvocationImpl is from package:analyzer/src/dart/ast/ast.dart
-// ConstructorFieldInitializer is from package:analyzer/dart/ast/ast.dart
+import 'dart:async';
+import 'package:angel_orm/angel_orm.dart';
+import 'package:postgres/postgres.dart';
+import 'car.dart';
+
+class CarQuery {
+  final List<String> _and = [];
+
+  final List<String> _or = [];
+
+  final List<String> _not = [];
+
+  final CarQueryWhere where = new CarQueryWhere();
+
+  void and(CarQuery other) {
+    var compiled = other.where.toWhereClause();
+    if (compiled != null) {
+      _and.add(compiled);
+    }
+  }
+
+  void or(CarQuery other) {
+    var compiled = other.where.toWhereClause();
+    if (compiled != null) {
+      _or.add(compiled);
+    }
+  }
+
+  void not(CarQuery other) {
+    var compiled = other.where.toWhereClause();
+    if (compiled != null) {
+      _not.add(compiled);
+    }
+  }
+
+  String toSql() {}
+
+  static Car parseRow(List row) {
+    return new Car.fromJson({
+      'id': row[0].toString(),
+      'make': row[1],
+      'description': row[2],
+      'family_friendly': row[3] == 1,
+      'recalled_at': DATE_YMD_HMS.parse(row[4]),
+      'created_at': DATE_YMD_HMS.parse(row[5]),
+      'updated_at': DATE_YMD_HMS.parse(row[6])
+    });
+  }
+
+  Stream<Car> get(PostgreSQLConnection connection) {
+    StreamController<Car> ctrl = new StreamController<Car>();
+    connection.query(toSql()).then((rows) {
+      rows.map(parseRow).forEach(ctrl.add);
+      ctrl.close();
+    }).catchError(ctrl.addError);
+    return ctrl.stream;
+  }
+
+  Future<Car> getOne(int id, PostgreSQLConnection connection) {
+    return connection.query('SELECT * FROM `cars` WHERE `id` = @id;',
+        substitutionValues: {'id': id}).then((rows) => parseRow(rows.first));
+  }
+
+  Future<Car> update() {}
+
+  Future<Car> delete() {}
+
+  static Future<Car> insert(PostgreSQLConnection connection,
+      {String id,
+      String make,
+      String description,
+      bool familyFriendly,
+      DateTime recalledAt,
+      DateTime createdAt,
+      DateTime updatedAt}) async {
+    var result = await connection.query(
+        'INSERT INTO `cars` (`id`, `make`, `description`, `family_friendly`, `recalled_at`, `created_at`, `updated_at` VALUES (@id, @make, @description, @familyFriendly, @recalledAt, @createdAt, @updatedAt);',
+        substitutionValues: {
+          'id': id,
+          'make': make,
+          'description': description,
+          'familyFriendly': familyFriendly,
+          'recalledAt': recalledAt,
+          'createdAt': createdAt,
+          'updatedAt': updatedAt
+        });
+    return parseRow(result);
+  }
+
+  static Stream<Car> getAll(PostgreSQLConnection connection) =>
+      new CarQuery().get(connection);
+}
+
+class CarQueryWhere {
+  final StringSqlExpressionBuilder id = new StringSqlExpressionBuilder();
+
+  final StringSqlExpressionBuilder make = new StringSqlExpressionBuilder();
+
+  final StringSqlExpressionBuilder description =
+      new StringSqlExpressionBuilder();
+
+  final BooleanSqlExpressionBuilder familyFriendly =
+      new BooleanSqlExpressionBuilder();
+
+  final DateTimeSqlExpressionBuilder recalledAt =
+      new DateTimeSqlExpressionBuilder('recalled_at');
+
+  final DateTimeSqlExpressionBuilder createdAt =
+      new DateTimeSqlExpressionBuilder('created_at');
+
+  final DateTimeSqlExpressionBuilder updatedAt =
+      new DateTimeSqlExpressionBuilder('updated_at');
+
+  String toWhereClause() {
+    final List<String> expressions = [];
+    if (id.hasValue) {
+      expressions.add('`id` ' + id.compile());
+    }
+    if (make.hasValue) {
+      expressions.add('`make` ' + make.compile());
+    }
+    if (description.hasValue) {
+      expressions.add('`description` ' + description.compile());
+    }
+    if (familyFriendly.hasValue) {
+      expressions.add('`family_friendly` ' + familyFriendly.compile());
+    }
+    if (recalledAt.hasValue) {
+      expressions.add(recalledAt.compile());
+    }
+    if (createdAt.hasValue) {
+      expressions.add(createdAt.compile());
+    }
+    if (updatedAt.hasValue) {
+      expressions.add(updatedAt.compile());
+    }
+    return expressions.isEmpty ? null : ('WHERE ' + expressions.join(' AND '));
+  }
+}
test/models/car.up.g.sql (new file, 9 lines)
@@ -0,0 +1,9 @@
+CREATE TABLE "cars" (
+  "id" varchar,
+  "make" varchar,
+  "description" varchar,
+  "family_friendly" bit,
+  "recalled_at" timestamp,
+  "created_at" timestamp,
+  "updated_at" timestamp
+);
@@ -1,10 +0,0 @@
-library angel_test.test.models.tire;
-
-import 'package:angel_framework/common.dart';
-import 'package:angel_serialize/angel_serialize.dart';
-part 'tire.g.dart';
-
-@serializable
-class _Tire extends Model {
-  int size;
-}
@@ -1,23 +0,0 @@
-// GENERATED CODE - DO NOT MODIFY BY HAND
-
-part of angel_test.test.models.tire;
-
-// **************************************************************************
-// Generator: JsonModelGenerator
-// Target: class _Tire
-// **************************************************************************
-
-class Tire extends _Tire {
-  @override
-  int size;
-
-  Tire({this.size});
-
-  factory Tire.fromJson(Map data) {
-    return new Tire(size: data['size']);
-  }
-
-  Map<String, dynamic> toJson() => {'size': size};
-
-  static Tire parse(Map map) => new Tire.fromJson(map);
-}
@@ -3,12 +3,14 @@ import 'package:source_gen/source_gen.dart';
 import 'package:angel_orm/builder.dart';
 import 'package:angel_serialize/builder.dart';
 
+final InputSet MODELS = new InputSet('angel_orm', const ['test/models/*.dart']);
+
 final PhaseGroup PHASES = new PhaseGroup()
   ..addPhase(new Phase()
-    ..addAction(new GeneratorBuilder([const JsonModelGenerator()]),
-        new InputSet('angel_orm', const ['test/models/*.dart'])))
+    ..addAction(new GeneratorBuilder([const JsonModelGenerator()]), MODELS))
   ..addPhase(new Phase()
     ..addAction(
         new GeneratorBuilder([new PostgresORMGenerator()],
            isStandalone: true, generatedExtension: '.orm.g.dart'),
-        new InputSet('angel_orm', const ['test/models/*.dart'])));
+        MODELS))
+  ..addPhase(new Phase()..addAction(new SQLMigrationGenerator(), MODELS));