Added belongsTo

commit 6a111b12cf (parent 9aebd67393)
22 changed files with 743 additions and 136 deletions

.idea/runConfigurations/tests_in_book_test_dart.xml (new file, 6 lines)
@@ -0,0 +1,6 @@
+<component name="ProjectRunConfigurationManager">
+  <configuration default="false" name="tests in book_test.dart" type="DartTestRunConfigurationType" factoryName="Dart Test" folderName="book" singleton="true">
+    <option name="filePath" value="$PROJECT_DIR$/angel_orm_generator/test/book_test.dart" />
+    <method />
+  </configuration>
+</component>
@@ -1,3 +1,7 @@
+# 1.0.0-alpha+6
+* `DateTimeSqlExpressionBuilder` will no longer automatically
+  insert quotation marks around names.
+
 # 1.0.0-alpha+5
 * Corrected a typo that was causing the aforementioned test failures.
   `==` becomes `=`.
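To make the effect of that change concrete, here is a minimal sketch of how the builder is now handed a table-qualified column name. The constructor argument, `hasValue`, and `compile()` all appear in the regenerated query code further down in this diff; the `import` path, `main` wrapper, and `print` are illustrative assumptions, and the expected output string is taken from the updated `car_test.dart` expectation.

import 'package:angel_orm/angel_orm.dart';

void main() {
  // The regenerated query classes later in this diff construct the builder
  // with a table-qualified name instead of a bare column name:
  var recalledAt = new DateTimeSqlExpressionBuilder('cars.recalled_at');

  // Before this change, compile() wrapped the name in double quotes, so the
  // output would have been "cars.recalled_at" <= '...', which Postgres reads
  // as one (nonexistent) identifier. Once a comparison has been applied,
  // compile() now emits the name exactly as given; the updated car_test.dart
  // expects:
  //   WHERE cars.family_friendly = TRUE AND cars.recalled_at <= '2000-01-01'
  if (recalledAt.hasValue) print(recalledAt.compile());
}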
@@ -260,7 +260,7 @@ class DateTimeSqlExpressionBuilder implements SqlExpressionBuilder {

   bool _change(String _op, DateTime dt, bool time) {
     var dateString = time ? DATE_YMD_HMS.format(dt) : DATE_YMD.format(dt);
-    _raw = '"$columnName" $_op \'$dateString\'';
+    _raw = '$columnName $_op \'$dateString\'';
     return true;
   }

@@ -291,40 +291,40 @@ class DateTimeSqlExpressionBuilder implements SqlExpressionBuilder {

   @override
   void isIn(@checked Iterable<DateTime> values) {
-    _raw = '"$columnName" IN (' +
-        values.map(DATE_YMD_HMS.format).map((s) => "'$s'").join(', ') +
+    _raw = '$columnName IN (' +
+        values.map(DATE_YMD_HMS.format).map((s) => '$s').join(', ') +
         ')';
   }

   @override
   void isNotIn(@checked Iterable<DateTime> values) {
-    _raw = '"$columnName" NOT IN (' +
-        values.map(DATE_YMD_HMS.format).map((s) => "'$s'").join(', ') +
+    _raw = '$columnName NOT IN (' +
+        values.map(DATE_YMD_HMS.format).map((s) => '$s').join(', ') +
         ')';
   }

   @override
   void isBetween(@checked DateTime lower, @checked DateTime upper) {
     var l = DATE_YMD_HMS.format(lower), u = DATE_YMD_HMS.format(upper);
-    _raw = "\"$columnName\" BETWEEN '$l' and '$u'";
+    _raw = "$columnName BETWEEN '$l' and '$u'";
   }

   @override
   void isNotBetween(@checked DateTime lower, @checked DateTime upper) {
     var l = DATE_YMD_HMS.format(lower), u = DATE_YMD_HMS.format(upper);
-    _raw = "\"$columnName\" NOT BETWEEN '$l' and '$u'";
+    _raw = "$columnName NOT BETWEEN '$l' and '$u'";
   }

   @override
   String compile() {
     if (_raw?.isNotEmpty == true) return _raw;
     List<String> parts = [];
-    if (year.hasValue) parts.add('YEAR("$columnName") ${year.compile()}');
-    if (month.hasValue) parts.add('MONTH("$columnName") ${month.compile()}');
-    if (day.hasValue) parts.add('DAY("$columnName") ${day.compile()}');
-    if (hour.hasValue) parts.add('HOUR("$columnName") ${hour.compile()}');
-    if (minute.hasValue) parts.add('MINUTE("$columnName") ${minute.compile()}');
-    if (second.hasValue) parts.add('SECOND("$columnName") ${second.compile()}');
+    if (year.hasValue) parts.add('YEAR($columnName) ${year.compile()}');
+    if (month.hasValue) parts.add('MONTH($columnName) ${month.compile()}');
+    if (day.hasValue) parts.add('DAY($columnName) ${day.compile()}');
+    if (hour.hasValue) parts.add('HOUR($columnName) ${hour.compile()}');
+    if (minute.hasValue) parts.add('MINUTE($columnName) ${minute.compile()}');
+    if (second.hasValue) parts.add('SECOND($columnName) ${second.compile()}');

     return parts.isEmpty ? null : parts.join(' AND ');
   }

@@ -1,5 +1,5 @@
 name: angel_orm
-version: 1.0.0-alpha+5
+version: 1.0.0-alpha+6
 description: Runtime support for Angel's ORM.
 author: Tobe O <thosakwe@gmail.com>
 homepage: https://github.com/angel-dart/orm

@@ -1,3 +1,6 @@
+# 1.0.0-alpha+2
+* Added support for `belongsTo` relationships. Still missing `hasOne`, `hasMany`, `belongsToMany`.
+
 # 1.0.0-alpha+1
 * Closed #12. `insertX` and `updateX` now use `rc.camelCase`, instead of `rc.snakeCase`.
 * Closed #13. Added `limit` and `offset` properties to `XQuery`.
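The new `book_test.dart` added by this commit (further down in the diff) exercises the `belongsTo` support end to end. Condensed into a single runnable sketch, with every call lifted from that test; only the `main` wrapper and the final `print` are illustrative:

import 'package:postgres/postgres.dart';
import 'models/author.dart';
import 'models/author.orm.g.dart';
import 'models/book.dart';
import 'models/book.orm.g.dart';
import 'common.dart';

main() async {
  PostgreSQLConnection connection = await connectToPostgres();

  // Insert an author, then a book that belongs to it.
  var rowling = await AuthorQuery.insert(connection, name: 'J.K. Rowling');
  var deathlyHallows = await BookQuery.insert(connection,
      authorId: int.parse(rowling.id), name: 'Deathly Hallows');

  // The generated BookQuery joins `authors` and hydrates `book.author`.
  var book = await BookQuery.getOne(int.parse(deathlyHallows.id), connection);
  print((book.author as Author).name); // J.K. Rowling

  await connection.close();
}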
@@ -1,4 +1,6 @@
 import 'package:analyzer/dart/element/element.dart';
+import 'package:analyzer/dart/element/type.dart';
+import 'package:analyzer/src/dart/element/element.dart';
 import 'package:angel_orm/angel_orm.dart';
 import 'package:angel_serialize_generator/src/find_annotation.dart';
 import 'package:angel_serialize_generator/build_context.dart' as serialize;

@@ -21,7 +23,9 @@ PostgresBuildContext buildContext(
   var ctx = new PostgresBuildContext(raw, annotation, resolver, buildStep,
       tableName: annotation.tableName?.isNotEmpty == true
           ? annotation.tableName
-          : pluralize(new ReCase(clazz.name).snakeCase));
+          : pluralize(new ReCase(clazz.name).snakeCase),
+      autoSnakeCaseNames: autoSnakeCaseNames != false,
+      autoIdAndDateFields: autoIdAndDateFields != false);
   var relations = new TypeChecker.fromRuntime(Relationship);
   List<String> fieldNames = [];
   List<FieldElement> fields = [];
@@ -107,5 +111,28 @@ PostgresBuildContext buildContext(
   }

   ctx.fields.addAll(fields);
+
+  // Add belongs to fields
+  // TODO: Do this for belongs to many as well
+  ctx.relationships.forEach((name, r) {
+    var relationship = ctx.populateRelationship(name);
+    var rc = new ReCase(relationship.localKey);
+
+    if (relationship.type == RelationshipType.BELONGS_TO) {
+      var field = new RelationshipConstraintField(
+          rc.camelCase, ctx.typeProvider.intType, name);
+      ctx.fields.add(field);
+      ctx.aliases[field.name] = relationship.localKey;
+    }
+  });
+
   return ctx;
 }
+
+class RelationshipConstraintField extends FieldElementImpl {
+  @override
+  final DartType type;
+  final String originalName;
+  RelationshipConstraintField(String name, this.type, this.originalName)
+      : super(name, -1);
+}
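For the Book/Author pair used by the new tests, the block above effectively adds one synthetic integer field to the build context. Spelled out with concrete values as an illustration (the `authorId` and `author_id` names come from the generated `book.orm.g.dart` later in the diff; this is not generated output):

// relationship.localKey == 'author_id' for Book's `author` relation, so the
// context gains an int field named `authorId`, aliased back to the
// `author_id` column that the migration generator now emits.
var field = new RelationshipConstraintField(
    'authorId', ctx.typeProvider.intType, 'author');
ctx.fields.add(field);
ctx.aliases['authorId'] = 'author_id';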
@ -2,6 +2,8 @@ import 'dart:async';
|
||||||
import 'package:analyzer/dart/element/element.dart';
|
import 'package:analyzer/dart/element/element.dart';
|
||||||
import 'package:angel_orm/angel_orm.dart';
|
import 'package:angel_orm/angel_orm.dart';
|
||||||
import 'package:build/build.dart';
|
import 'package:build/build.dart';
|
||||||
|
import 'package:inflection/inflection.dart';
|
||||||
|
import 'package:recase/recase.dart';
|
||||||
import 'package:source_gen/src/annotation.dart';
|
import 'package:source_gen/src/annotation.dart';
|
||||||
import 'package:source_gen/src/utils.dart';
|
import 'package:source_gen/src/utils.dart';
|
||||||
import 'build_context.dart';
|
import 'build_context.dart';
|
||||||
|
@ -93,6 +95,56 @@ class SQLMigrationGenerator implements Builder {
|
||||||
if (col.nullable != true) buf.write(' NOT NULLABLE');
|
if (col.nullable != true) buf.write(' NOT NULLABLE');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Relations
|
||||||
|
ctx.relationshipFields.forEach((f) {
|
||||||
|
if (i++ > 0) buf.writeln(',');
|
||||||
|
var typeName =
|
||||||
|
f.type.name.startsWith('_') ? f.type.name.substring(1) : f.type.name;
|
||||||
|
var rc = new ReCase(typeName);
|
||||||
|
var relationship = ctx.relationships[f.name];
|
||||||
|
|
||||||
|
if (relationship.type == RelationshipType.BELONGS_TO) {
|
||||||
|
var localKey = relationship.localKey ??
|
||||||
|
(autoSnakeCaseNames != false
|
||||||
|
? '${rc.snakeCase}_id'
|
||||||
|
: '${typeName}Id');
|
||||||
|
var foreignKey = relationship.foreignKey ?? 'id';
|
||||||
|
var foreignTable = relationship.foreignTable ??
|
||||||
|
(autoSnakeCaseNames != false
|
||||||
|
? pluralize(rc.snakeCase)
|
||||||
|
: pluralize(typeName));
|
||||||
|
buf.write(' "$localKey" int REFERENCES $foreignTable($foreignKey)');
|
||||||
|
if (relationship.cascadeOnDelete != false)
|
||||||
|
buf.write(' ON DELETE CASCADE');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Primary keys, unique
|
||||||
|
bool hasPrimary = false;
|
||||||
|
ctx.fields.forEach((f) {
|
||||||
|
var col = ctx.columnInfo[f.name];
|
||||||
|
if (col != null) {
|
||||||
|
var name = ctx.resolveFieldName(f.name);
|
||||||
|
if (col.index == IndexType.UNIQUE) {
|
||||||
|
if (i++ > 0) buf.writeln(',');
|
||||||
|
buf.write(' UNIQUE($name(');
|
||||||
|
} else if (col.index == IndexType.PRIMARY_KEY) {
|
||||||
|
if (i++ > 0) buf.writeln(',');
|
||||||
|
hasPrimary = true;
|
||||||
|
buf.write(' PRIMARY KEY($name)');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!hasPrimary) {
|
||||||
|
var idField =
|
||||||
|
ctx.fields.firstWhere((f) => f.name == 'id', orElse: () => null);
|
||||||
|
if (idField != null) {
|
||||||
|
if (i++ > 0) buf.writeln(',');
|
||||||
|
buf.write(' PRIMARY KEY(id)');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
buf.writeln();
|
buf.writeln();
|
||||||
buf.writeln(');');
|
buf.writeln(');');
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,8 +2,10 @@ import 'dart:async';
|
||||||
import 'package:analyzer/dart/element/element.dart';
|
import 'package:analyzer/dart/element/element.dart';
|
||||||
import 'package:angel_orm/angel_orm.dart';
|
import 'package:angel_orm/angel_orm.dart';
|
||||||
import 'package:build/build.dart';
|
import 'package:build/build.dart';
|
||||||
|
import 'package:code_builder/dart/async.dart';
|
||||||
import 'package:code_builder/dart/core.dart';
|
import 'package:code_builder/dart/core.dart';
|
||||||
import 'package:code_builder/code_builder.dart';
|
import 'package:code_builder/code_builder.dart';
|
||||||
|
import 'package:inflection/inflection.dart';
|
||||||
import 'package:path/path.dart' as p;
|
import 'package:path/path.dart' as p;
|
||||||
import 'package:recase/recase.dart';
|
import 'package:recase/recase.dart';
|
||||||
import 'package:source_gen/src/annotation.dart';
|
import 'package:source_gen/src/annotation.dart';
|
||||||
|
@ -125,7 +127,9 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
|
||||||
var m = new MethodBuilder('sort$sort', returnType: lib$core.$void);
|
var m = new MethodBuilder('sort$sort', returnType: lib$core.$void);
|
||||||
m.addPositional(parameter('key', [lib$core.String]));
|
m.addPositional(parameter('key', [lib$core.String]));
|
||||||
m.addStatement(literal(sort).asAssign(reference('_sortMode')));
|
m.addStatement(literal(sort).asAssign(reference('_sortMode')));
|
||||||
m.addStatement(reference('key').asAssign(reference('_sortKey')));
|
m.addStatement((literal(ctx.prefix) + reference('key'))
|
||||||
|
.parentheses()
|
||||||
|
.asAssign(reference('_sortKey')));
|
||||||
clazz.addMethod(m);
|
clazz.addMethod(m);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -199,8 +203,31 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
|
||||||
return clazz;
|
return clazz;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
String computeSelector(PostgresBuildContext ctx) {
|
||||||
|
var buf = new StringBuffer();
|
||||||
|
int i = 0;
|
||||||
|
|
||||||
|
// Add all regular fields
|
||||||
|
ctx.fields.forEach((f) {
|
||||||
|
if (i++ > 0) buf.write(', ');
|
||||||
|
var name = ctx.resolveFieldName(f.name);
|
||||||
|
buf.write(ctx.prefix + "$name");
|
||||||
|
});
|
||||||
|
|
||||||
|
// Add all relationship fields...
|
||||||
|
ctx.relationships.forEach((name, r) {
|
||||||
|
var relationship = ctx.populateRelationship(name);
|
||||||
|
relationship.modelTypeContext.fields.forEach((f) {
|
||||||
|
if (i++ > 0) buf.write(', ');
|
||||||
|
var name = relationship.modelTypeContext.resolveFieldName(f.name);
|
||||||
|
buf.write('${relationship.foreignTable}.$name');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
return buf.toString();
|
||||||
|
}
|
||||||
|
|
||||||
MethodBuilder buildToSqlMethod(PostgresBuildContext ctx) {
|
MethodBuilder buildToSqlMethod(PostgresBuildContext ctx) {
|
||||||
// TODO: Bake relationships into SQL queries
|
|
||||||
var meth = new MethodBuilder('toSql', returnType: lib$core.String);
|
var meth = new MethodBuilder('toSql', returnType: lib$core.String);
|
||||||
meth.addPositional(parameter('prefix', [lib$core.String]).asOptional());
|
meth.addPositional(parameter('prefix', [lib$core.String]).asOptional());
|
||||||
var buf = reference('buf');
|
var buf = reference('buf');
|
||||||
|
@ -210,11 +237,27 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
|
||||||
// Write prefix, or default to SELECT
|
// Write prefix, or default to SELECT
|
||||||
var prefix = reference('prefix');
|
var prefix = reference('prefix');
|
||||||
meth.addStatement(buf.invoke('write', [
|
meth.addStatement(buf.invoke('write', [
|
||||||
prefix
|
prefix.notEquals(literal(null)).ternary(prefix,
|
||||||
.notEquals(literal(null))
|
literal('SELECT ${computeSelector(ctx)} FROM "${ctx.tableName}"'))
|
||||||
.ternary(prefix, literal('SELECT * FROM "${ctx.tableName}"'))
|
|
||||||
]));
|
]));
|
||||||
|
|
||||||
|
var relationsIfThen = ifThen(prefix.equals(literal(null)));
|
||||||
|
|
||||||
|
// Apply relationships
|
||||||
|
ctx.relationships.forEach((name, r) {
|
||||||
|
var relationship = ctx.populateRelationship(name);
|
||||||
|
|
||||||
|
// TODO: Has one, has many, belongs to many
|
||||||
|
if (relationship.type == RelationshipType.BELONGS_TO) {
|
||||||
|
var b = new StringBuffer(
|
||||||
|
' INNER JOIN ${relationship.foreignTable} ON ${ctx.tableName}.${relationship.localKey} = ${relationship.foreignTable}.${relationship.foreignKey}');
|
||||||
|
relationsIfThen
|
||||||
|
.addStatement(buf.invoke('write', [literal(b.toString())]));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
meth.addStatement(relationsIfThen);
|
||||||
|
|
||||||
meth.addStatement(varField('whereClause',
|
meth.addStatement(varField('whereClause',
|
||||||
value: reference('where').invoke('toWhereClause', [])));
|
value: reference('where').invoke('toWhereClause', [])));
|
||||||
|
|
||||||
|
@ -308,32 +351,62 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
|
||||||
data[name] = rowKey;
|
data[name] = rowKey;
|
||||||
});
|
});
|
||||||
|
|
||||||
ctx.relationships.forEach((name, relationship) {
|
// Invoke fromJson()
|
||||||
var field = ctx.resolveRelationshipField(name);
|
var result = reference('result');
|
||||||
var alias = ctx.resolveFieldName(name);
|
meth.addStatement(varField('result',
|
||||||
var idx = i++;
|
value: ctx.modelClassBuilder
|
||||||
var rowKey = row[literal(idx)];
|
.newInstance([map(data)], constructor: 'fromJson')));
|
||||||
data[alias] = (row.property('length') < literal(idx + 1)).ternary(
|
|
||||||
literal(null),
|
// For each relationship, try to parse
|
||||||
new TypeBuilder(new ReCase(field.type.name).pascalCase + 'Query')
|
ctx.relationships.forEach((name, r) {
|
||||||
.invoke('parseRow', [rowKey]));
|
int minIndex = i;
|
||||||
|
|
||||||
|
var relationship = ctx.populateRelationship(name);
|
||||||
|
var rc = new ReCase(relationship.dartType.name);
|
||||||
|
var relationshipQuery = new TypeBuilder('${rc.pascalCase}Query');
|
||||||
|
List<ExpressionBuilder> relationshipRow = [];
|
||||||
|
|
||||||
|
relationship.modelTypeContext.fields.forEach((f) {
|
||||||
|
relationshipRow.add(row[literal(i++)]);
|
||||||
|
});
|
||||||
|
|
||||||
|
meth.addStatement(ifThen(row.property('length') > literal(minIndex), [
|
||||||
|
relationshipQuery.invoke(
|
||||||
|
'parseRow', [list(relationshipRow)]).asAssign(result.property(name))
|
||||||
|
]));
|
||||||
});
|
});
|
||||||
|
|
||||||
// Then, call a .fromJson() constructor
|
// Then, call a .fromJson() constructor
|
||||||
meth.addStatement(ctx.modelClassBuilder
|
meth.addStatement(result.asReturn());
|
||||||
.newInstance([map(data)], constructor: 'fromJson').asReturn());
|
|
||||||
|
|
||||||
return meth;
|
return meth;
|
||||||
}
|
}
|
||||||
|
|
||||||
void _invokeStreamClosure(ExpressionBuilder future, MethodBuilder meth) {
|
void _invokeStreamClosure(
|
||||||
|
PostgresBuildContext ctx, ExpressionBuilder future, MethodBuilder meth) {
|
||||||
var ctrl = reference('ctrl');
|
var ctrl = reference('ctrl');
|
||||||
// Invoke query...
|
// Invoke query...
|
||||||
var catchError = ctrl.property('addError');
|
var catchError = ctrl.property('addError');
|
||||||
var then = new MethodBuilder.closure()..addPositional(parameter('rows'));
|
var then = new MethodBuilder.closure(modifier: MethodModifier.asAsync)
|
||||||
then.addStatement(reference('rows')
|
..addPositional(parameter('rows'));
|
||||||
.invoke('map', [reference('parseRow')]).invoke(
|
|
||||||
'forEach', [ctrl.property('add')]));
|
var forEachClosure =
|
||||||
|
new MethodBuilder.closure(modifier: MethodModifier.asAsync);
|
||||||
|
forEachClosure.addPositional(parameter('row'));
|
||||||
|
forEachClosure.addStatement(varField('parsed',
|
||||||
|
value: reference('parseRow').call([reference('row')])));
|
||||||
|
_applyRelationshipsToOutput(
|
||||||
|
ctx, reference('parsed'), reference('row'), forEachClosure);
|
||||||
|
forEachClosure.addStatement(reference('parsed').asReturn());
|
||||||
|
|
||||||
|
then.addStatement(varField('futures',
|
||||||
|
value: reference('rows').invoke('map', [forEachClosure])));
|
||||||
|
then.addStatement(varField('output',
|
||||||
|
value:
|
||||||
|
lib$async.Future.invoke('wait', [reference('futures')]).asAwait()));
|
||||||
|
then.addStatement(
|
||||||
|
reference('output').invoke('forEach', [ctrl.property('add')]));
|
||||||
|
|
||||||
then.addStatement(ctrl.invoke('close', []));
|
then.addStatement(ctrl.invoke('close', []));
|
||||||
meth.addStatement(
|
meth.addStatement(
|
||||||
future.invoke('then', [then]).invoke('catchError', [catchError]));
|
future.invoke('then', [then]).invoke('catchError', [catchError]));
|
||||||
|
@ -353,7 +426,7 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
|
||||||
|
|
||||||
var future =
|
var future =
|
||||||
reference('connection').invoke('query', [reference('toSql').call([])]);
|
reference('connection').invoke('query', [reference('toSql').call([])]);
|
||||||
_invokeStreamClosure(future, meth);
|
_invokeStreamClosure(ctx, future, meth);
|
||||||
return meth;
|
return meth;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -364,16 +437,23 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
|
||||||
meth.addPositional(parameter('id', [lib$core.int]));
|
meth.addPositional(parameter('id', [lib$core.int]));
|
||||||
meth.addPositional(
|
meth.addPositional(
|
||||||
parameter('connection', [ctx.postgreSQLConnectionBuilder]));
|
parameter('connection', [ctx.postgreSQLConnectionBuilder]));
|
||||||
meth.addStatement(reference('connection').invoke('query', [
|
|
||||||
literal('SELECT * FROM "${ctx.tableName}" WHERE "id" = @id;')
|
var query = reference('query'),
|
||||||
], namedArguments: {
|
whereId = query.property('where').property('id');
|
||||||
'substitutionValues': map({'id': reference('id')})
|
meth.addStatement(
|
||||||
}).invoke('then', [
|
varField('query', value: ctx.queryClassBuilder.newInstance([])));
|
||||||
new MethodBuilder.closure(
|
meth.addStatement(whereId.invoke('equals', [reference('id')]));
|
||||||
returns:
|
|
||||||
reference('parseRow').call([reference('rows').property('first')]))
|
// Return null on error
|
||||||
..addPositional(parameter('rows'))
|
var catchErr = new MethodBuilder.closure(returns: literal(null));
|
||||||
]).asReturn());
|
catchErr.addPositional(parameter('_'));
|
||||||
|
|
||||||
|
meth.addStatement(query
|
||||||
|
.invoke('get', [reference('connection')])
|
||||||
|
.property('first')
|
||||||
|
.invoke('catchError', [catchErr])
|
||||||
|
.asReturn());
|
||||||
|
|
||||||
return meth;
|
return meth;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -491,7 +571,7 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
|
||||||
$buf.invoke('toString', []) + literal(buf2.toString()),
|
$buf.invoke('toString', []) + literal(buf2.toString()),
|
||||||
meth,
|
meth,
|
||||||
substitutionValues);
|
substitutionValues);
|
||||||
_invokeStreamClosure(result, meth);
|
_invokeStreamClosure(ctx, result, meth);
|
||||||
return meth;
|
return meth;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -514,7 +594,7 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
|
||||||
reference('toSql').call([literal('DELETE FROM "${ctx.tableName}"')]) +
|
reference('toSql').call([literal('DELETE FROM "${ctx.tableName}"')]) +
|
||||||
literal(litBuf.toString())
|
literal(litBuf.toString())
|
||||||
]);
|
]);
|
||||||
_invokeStreamClosure(future, meth);
|
_invokeStreamClosure(ctx, future, meth);
|
||||||
|
|
||||||
return meth;
|
return meth;
|
||||||
}
|
}
|
||||||
|
@ -592,18 +672,71 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
|
||||||
|
|
||||||
var connection = reference('connection');
|
var connection = reference('connection');
|
||||||
var query = literal(buf.toString());
|
var query = literal(buf.toString());
|
||||||
var result = reference('result');
|
var result = reference('result'), output = reference('output');
|
||||||
meth.addStatement(varField('result',
|
meth.addStatement(varField('result',
|
||||||
value: connection.invoke('query', [
|
value: connection.invoke('query', [
|
||||||
query
|
query
|
||||||
], namedArguments: {
|
], namedArguments: {
|
||||||
'substitutionValues': map(substitutionValues)
|
'substitutionValues': map(substitutionValues)
|
||||||
}).asAwait()));
|
}).asAwait()));
|
||||||
meth.addStatement(
|
|
||||||
reference('parseRow').call([result[literal(0)]]).asReturn());
|
meth.addStatement(varField('output',
|
||||||
|
value: reference('parseRow').call([result[literal(0)]])));
|
||||||
|
|
||||||
|
_applyRelationshipsToOutput(ctx, output, result[literal(0)], meth);
|
||||||
|
|
||||||
|
meth.addStatement(output.asReturn());
|
||||||
return meth;
|
return meth;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void _applyRelationshipsToOutput(PostgresBuildContext ctx,
|
||||||
|
ExpressionBuilder output, ExpressionBuilder row, MethodBuilder meth) {
|
||||||
|
// Every relationship should fill itself in with a query
|
||||||
|
// TODO: Has one, has many, belongs to many
|
||||||
|
ctx.relationships.forEach((name, r) {
|
||||||
|
var relationship = ctx.populateRelationship(name);
|
||||||
|
|
||||||
|
if (relationship.type == RelationshipType.BELONGS_TO) {
|
||||||
|
var rc = new ReCase(relationship.dartType.name);
|
||||||
|
var type = new TypeBuilder('${rc.pascalCase}Query');
|
||||||
|
|
||||||
|
// Resolve index within row...
|
||||||
|
bool matched = false;
|
||||||
|
int col = 0;
|
||||||
|
for (var field in ctx.fields) {
|
||||||
|
if (field is RelationshipConstraintField &&
|
||||||
|
field.originalName == name) {
|
||||||
|
matched = true;
|
||||||
|
break;
|
||||||
|
} else
|
||||||
|
col++;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!matched)
|
||||||
|
throw 'Couldn\'t resolve row index for relationship "${name}".';
|
||||||
|
|
||||||
|
var idAsInt = row[literal(col)];
|
||||||
|
meth.addStatement(type
|
||||||
|
.invoke('getOne', [idAsInt, reference('connection')])
|
||||||
|
.asAwait()
|
||||||
|
.asAssign(output.property(name)));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
void _addRelationshipConstraintsNamed(
|
||||||
|
MethodBuilder m, PostgresBuildContext ctx) {
|
||||||
|
ctx.relationships.forEach((name, r) {
|
||||||
|
var relationship = ctx.populateRelationship(name);
|
||||||
|
|
||||||
|
// TODO: Belongs to many
|
||||||
|
if (relationship.type == RelationshipType.BELONGS_TO) {
|
||||||
|
var rc = new ReCase(relationship.localKey);
|
||||||
|
m.addNamed(parameter(rc.camelCase, [lib$core.int]));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
MethodBuilder buildInsertModelMethod(PostgresBuildContext ctx) {
|
MethodBuilder buildInsertModelMethod(PostgresBuildContext ctx) {
|
||||||
var rc = new ReCase(ctx.modelClassName);
|
var rc = new ReCase(ctx.modelClassName);
|
||||||
var meth = new MethodBuilder('insert${rc.pascalCase}',
|
var meth = new MethodBuilder('insert${rc.pascalCase}',
|
||||||
|
@ -613,12 +746,17 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
|
||||||
meth.addPositional(
|
meth.addPositional(
|
||||||
parameter('connection', [ctx.postgreSQLConnectionBuilder]));
|
parameter('connection', [ctx.postgreSQLConnectionBuilder]));
|
||||||
meth.addPositional(parameter(rc.camelCase, [ctx.modelClassBuilder]));
|
meth.addPositional(parameter(rc.camelCase, [ctx.modelClassBuilder]));
|
||||||
|
_addRelationshipConstraintsNamed(meth, ctx);
|
||||||
|
|
||||||
Map<String, ExpressionBuilder> args = {};
|
Map<String, ExpressionBuilder> args = {};
|
||||||
var ref = reference(rc.camelCase);
|
var ref = reference(rc.camelCase);
|
||||||
|
|
||||||
ctx.fields.forEach((f) {
|
ctx.fields.forEach((f) {
|
||||||
if (f.name != 'id') args[f.name] = ref.property(f.name);
|
if (f.name != 'id') {
|
||||||
|
args[f.name] = f is RelationshipConstraintField
|
||||||
|
? reference(f.name)
|
||||||
|
: ref.property(f.name);
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
meth.addStatement(ctx.queryClassBuilder
|
meth.addStatement(ctx.queryClassBuilder
|
||||||
|
@ -652,7 +790,16 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
|
||||||
// return query.update(connection, ...).first;
|
// return query.update(connection, ...).first;
|
||||||
Map<String, ExpressionBuilder> args = {};
|
Map<String, ExpressionBuilder> args = {};
|
||||||
ctx.fields.forEach((f) {
|
ctx.fields.forEach((f) {
|
||||||
if (f.name != 'id') args[f.name] = ref.property(f.name);
|
if (f.name != 'id') {
|
||||||
|
if (f is RelationshipConstraintField) {
|
||||||
|
// Need to int.parse the related id and pass it
|
||||||
|
var relation = ref.property(f.originalName);
|
||||||
|
var relationship = ctx.populateRelationship(f.originalName);
|
||||||
|
args[f.name] = lib$core.int
|
||||||
|
.invoke('parse', [relation.property(relationship.foreignKey)]);
|
||||||
|
} else
|
||||||
|
args[f.name] = ref.property(f.name);
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
var update =
|
var update =
|
||||||
|
@ -693,7 +840,8 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
|
||||||
break;
|
break;
|
||||||
case 'DateTime':
|
case 'DateTime':
|
||||||
queryBuilderType = new TypeBuilder('DateTimeSqlExpressionBuilder');
|
queryBuilderType = new TypeBuilder('DateTimeSqlExpressionBuilder');
|
||||||
args.add(literal(ctx.resolveFieldName(field.name)));
|
args.add(literal(
|
||||||
|
ctx.tableName + '.' + ctx.resolveFieldName(field.name)));
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -719,9 +867,10 @@ class PostgresORMGenerator extends GeneratorForAnnotation<ORM> {
|
||||||
ctx.fields.forEach((field) {
|
ctx.fields.forEach((field) {
|
||||||
var name = ctx.resolveFieldName(field.name);
|
var name = ctx.resolveFieldName(field.name);
|
||||||
var queryBuilder = reference(field.name);
|
var queryBuilder = reference(field.name);
|
||||||
var toAdd = field.type.name == 'DateTime'
|
var toAdd = field.type.isAssignableTo(ctx.dateTimeType)
|
||||||
? queryBuilder.invoke('compile', [])
|
? queryBuilder.invoke('compile', [])
|
||||||
: (literal('"$name" ') + queryBuilder.invoke('compile', []));
|
: (literal('${ctx.tableName}.$name ') +
|
||||||
|
queryBuilder.invoke('compile', []));
|
||||||
|
|
||||||
toWhereClause.addStatement(ifThen(queryBuilder.property('hasValue'), [
|
toWhereClause.addStatement(ifThen(queryBuilder.property('hasValue'), [
|
||||||
expressions.invoke('add', [toAdd])
|
expressions.invoke('add', [toAdd])
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
import 'package:analyzer/dart/constant/value.dart';
|
||||||
import 'package:analyzer/dart/element/element.dart';
|
import 'package:analyzer/dart/element/element.dart';
|
||||||
import 'package:analyzer/dart/element/type.dart';
|
import 'package:analyzer/dart/element/type.dart';
|
||||||
import 'package:analyzer/src/generated/resolver.dart';
|
import 'package:analyzer/src/generated/resolver.dart';
|
||||||
|
@ -5,6 +6,10 @@ import 'package:angel_orm/angel_orm.dart';
|
||||||
import 'package:angel_serialize_generator/context.dart';
|
import 'package:angel_serialize_generator/context.dart';
|
||||||
import 'package:build/build.dart';
|
import 'package:build/build.dart';
|
||||||
import 'package:code_builder/code_builder.dart';
|
import 'package:code_builder/code_builder.dart';
|
||||||
|
import 'package:inflection/inflection.dart';
|
||||||
|
import 'package:recase/recase.dart';
|
||||||
|
import 'package:source_gen/source_gen.dart';
|
||||||
|
import 'build_context.dart';
|
||||||
|
|
||||||
class PostgresBuildContext extends BuildContext {
|
class PostgresBuildContext extends BuildContext {
|
||||||
DartType _dateTimeTypeCache;
|
DartType _dateTimeTypeCache;
|
||||||
|
@ -14,9 +19,12 @@ class PostgresBuildContext extends BuildContext {
|
||||||
_queryClassBuilder,
|
_queryClassBuilder,
|
||||||
_whereClassBuilder,
|
_whereClassBuilder,
|
||||||
_postgresqlConnectionBuilder;
|
_postgresqlConnectionBuilder;
|
||||||
|
String _prefix;
|
||||||
|
final Map<String, Relationship> _populatedRelationships = {};
|
||||||
final Map<String, Column> columnInfo = {};
|
final Map<String, Column> columnInfo = {};
|
||||||
final Map<String, IndexType> indices = {};
|
final Map<String, IndexType> indices = {};
|
||||||
final Map<String, Relationship> relationships = {};
|
final Map<String, Relationship> relationships = {};
|
||||||
|
final bool autoSnakeCaseNames, autoIdAndDateFields;
|
||||||
final String tableName;
|
final String tableName;
|
||||||
final ORM ormAnnotation;
|
final ORM ormAnnotation;
|
||||||
final BuildContext raw;
|
final BuildContext raw;
|
||||||
|
@ -26,7 +34,7 @@ class PostgresBuildContext extends BuildContext {
|
||||||
|
|
||||||
PostgresBuildContext(
|
PostgresBuildContext(
|
||||||
this.raw, this.ormAnnotation, this.resolver, this.buildStep,
|
this.raw, this.ormAnnotation, this.resolver, this.buildStep,
|
||||||
{this.tableName})
|
{this.tableName, this.autoSnakeCaseNames, this.autoIdAndDateFields})
|
||||||
: super(raw.annotation,
|
: super(raw.annotation,
|
||||||
originalClassName: raw.originalClassName,
|
originalClassName: raw.originalClassName,
|
||||||
sourceFilename: raw.sourceFilename);
|
sourceFilename: raw.sourceFilename);
|
||||||
|
@ -45,6 +53,14 @@ class PostgresBuildContext extends BuildContext {
|
||||||
TypeBuilder get postgreSQLConnectionBuilder =>
|
TypeBuilder get postgreSQLConnectionBuilder =>
|
||||||
_postgresqlConnectionBuilder ??= new TypeBuilder('PostgreSQLConnection');
|
_postgresqlConnectionBuilder ??= new TypeBuilder('PostgreSQLConnection');
|
||||||
|
|
||||||
|
String get prefix {
|
||||||
|
if (_prefix != null) return _prefix;
|
||||||
|
if (relationships.isEmpty)
|
||||||
|
return _prefix = '';
|
||||||
|
else
|
||||||
|
return _prefix = tableName + '.';
|
||||||
|
}
|
||||||
|
|
||||||
Map<String, String> get aliases => raw.aliases;
|
Map<String, String> get aliases => raw.aliases;
|
||||||
|
|
||||||
Map<String, bool> get shimmed => raw.shimmed;
|
Map<String, bool> get shimmed => raw.shimmed;
|
||||||
|
@ -71,4 +87,110 @@ class PostgresBuildContext extends BuildContext {
|
||||||
|
|
||||||
FieldElement resolveRelationshipField(String name) =>
|
FieldElement resolveRelationshipField(String name) =>
|
||||||
relationshipFields.firstWhere((f) => f.name == name, orElse: () => null);
|
relationshipFields.firstWhere((f) => f.name == name, orElse: () => null);
|
||||||
|
|
||||||
|
PopulatedRelationship populateRelationship(String name) {
|
||||||
|
return _populatedRelationships.putIfAbsent(name, () {
|
||||||
|
// TODO: Belongs to many
|
||||||
|
var f = raw.fields.firstWhere((f) => f.name == name);
|
||||||
|
var relationship = relationships[name];
|
||||||
|
var typeName =
|
||||||
|
f.type.name.startsWith('_') ? f.type.name.substring(1) : f.type.name;
|
||||||
|
var rc = new ReCase(typeName);
|
||||||
|
|
||||||
|
if (relationship.type == RelationshipType.HAS_ONE ||
|
||||||
|
relationship.type == RelationshipType.HAS_MANY) {
|
||||||
|
var foreignKey = relationship.localKey ??
|
||||||
|
(autoSnakeCaseNames != false
|
||||||
|
? '${rc.snakeCase}_id'
|
||||||
|
: '${typeName}Id');
|
||||||
|
var localKey = relationship.foreignKey ?? 'id';
|
||||||
|
var foreignTable = relationship.foreignTable ??
|
||||||
|
(autoSnakeCaseNames != false
|
||||||
|
? pluralize(rc.snakeCase)
|
||||||
|
: pluralize(typeName));
|
||||||
|
return new PopulatedRelationship(relationship.type, f.type, buildStep,
|
||||||
|
resolver, autoSnakeCaseNames, autoIdAndDateFields,
|
||||||
|
localKey: localKey,
|
||||||
|
foreignKey: foreignKey,
|
||||||
|
foreignTable: foreignTable,
|
||||||
|
cascadeOnDelete: relationship.cascadeOnDelete);
|
||||||
|
} else if (relationship.type == RelationshipType.BELONGS_TO) {
|
||||||
|
var localKey = relationship.localKey ??
|
||||||
|
(autoSnakeCaseNames != false
|
||||||
|
? '${rc.snakeCase}_id'
|
||||||
|
: '${typeName}Id');
|
||||||
|
var foreignKey = relationship.foreignKey ?? 'id';
|
||||||
|
var foreignTable = relationship.foreignTable ??
|
||||||
|
(autoSnakeCaseNames != false
|
||||||
|
? pluralize(rc.snakeCase)
|
||||||
|
: pluralize(typeName));
|
||||||
|
return new PopulatedRelationship(relationship.type, f.type, buildStep,
|
||||||
|
resolver, autoSnakeCaseNames, autoIdAndDateFields,
|
||||||
|
localKey: localKey,
|
||||||
|
foreignKey: foreignKey,
|
||||||
|
foreignTable: foreignTable,
|
||||||
|
cascadeOnDelete: relationship.cascadeOnDelete);
|
||||||
|
} else
|
||||||
|
throw new UnsupportedError(
|
||||||
|
'Invalid relationship type: ${relationship.type}');
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class PopulatedRelationship extends Relationship {
|
||||||
|
DartType _modelType;
|
||||||
|
PostgresBuildContext _modelTypeContext;
|
||||||
|
DartObject _modelTypeORM;
|
||||||
|
final DartType dartType;
|
||||||
|
final BuildStep buildStep;
|
||||||
|
final Resolver resolver;
|
||||||
|
final bool autoSnakeCaseNames, autoIdAndDateFields;
|
||||||
|
|
||||||
|
PopulatedRelationship(int type, this.dartType, this.buildStep, this.resolver,
|
||||||
|
this.autoSnakeCaseNames, this.autoIdAndDateFields,
|
||||||
|
{String localKey,
|
||||||
|
String foreignKey,
|
||||||
|
String foreignTable,
|
||||||
|
bool cascadeOnDelete})
|
||||||
|
: super(type,
|
||||||
|
localKey: localKey,
|
||||||
|
foreignKey: foreignKey,
|
||||||
|
foreignTable: foreignTable,
|
||||||
|
cascadeOnDelete: cascadeOnDelete);
|
||||||
|
|
||||||
|
DartType get modelType {
|
||||||
|
if (_modelType != null) return _modelType;
|
||||||
|
DartType searchType = dartType;
|
||||||
|
var ormChecker = new TypeChecker.fromRuntime(ORM);
|
||||||
|
|
||||||
|
while (searchType != null) {
|
||||||
|
var classElement = searchType.element as ClassElement;
|
||||||
|
var ormAnnotation = ormChecker.firstAnnotationOf(classElement);
|
||||||
|
|
||||||
|
if (ormAnnotation != null) {
|
||||||
|
_modelTypeORM = ormAnnotation;
|
||||||
|
return _modelType = searchType;
|
||||||
|
} else {
|
||||||
|
// If we didn't find an @ORM(), then refer to the parent type.
|
||||||
|
searchType = classElement.supertype;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new StateError(
|
||||||
|
'Neither ${dartType.name} nor its parent types are annotated with an @ORM() annotation. It is impossible to compute this relationship.');
|
||||||
|
}
|
||||||
|
|
||||||
|
PostgresBuildContext get modelTypeContext {
|
||||||
|
if (_modelTypeContext != null) return _modelTypeContext;
|
||||||
|
var reader = new ConstantReader(_modelTypeORM);
|
||||||
|
if (reader.isNull)
|
||||||
|
reader = null;
|
||||||
|
else
|
||||||
|
reader = reader.read('tableName');
|
||||||
|
var orm = reader == null
|
||||||
|
? new ORM()
|
||||||
|
: new ORM(reader.isString ? reader.stringValue : null);
|
||||||
|
return _modelTypeContext = buildContext(modelType.element, orm, buildStep,
|
||||||
|
resolver, autoSnakeCaseNames, autoIdAndDateFields);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -1,5 +1,5 @@
 name: angel_orm_generator
-version: 1.0.0-alpha+1
+version: 1.0.0-alpha+2
 description: Code generators for Angel's ORM.
 author: Tobe O <thosakwe@gmail.com>
 homepage: https://github.com/angel-dart/orm

angel_orm_generator/test/book_test.dart (new file, 135 lines)
@ -0,0 +1,135 @@
|
||||||
|
import 'package:angel_orm/angel_orm.dart';
|
||||||
|
import 'package:postgres/postgres.dart';
|
||||||
|
import 'package:test/test.dart';
|
||||||
|
import 'models/author.dart';
|
||||||
|
import 'models/author.orm.g.dart';
|
||||||
|
import 'models/book.dart';
|
||||||
|
import 'models/book.orm.g.dart';
|
||||||
|
import 'common.dart';
|
||||||
|
|
||||||
|
main() {
|
||||||
|
PostgreSQLConnection connection;
|
||||||
|
Author rowling;
|
||||||
|
Book deathlyHallows;
|
||||||
|
|
||||||
|
setUp(() async {
|
||||||
|
connection = await connectToPostgres();
|
||||||
|
|
||||||
|
// Insert an author
|
||||||
|
rowling = await AuthorQuery.insert(connection, name: 'J.K. Rowling');
|
||||||
|
|
||||||
|
// And a book
|
||||||
|
deathlyHallows = await BookQuery.insert(connection,
|
||||||
|
authorId: int.parse(rowling.id), name: 'Deathly Hallows');
|
||||||
|
});
|
||||||
|
|
||||||
|
tearDown(() => connection.close());
|
||||||
|
|
||||||
|
group('selects', ()
|
||||||
|
{
|
||||||
|
test('select all', () async {
|
||||||
|
var query = new BookQuery();
|
||||||
|
var books = await query.get(connection).toList();
|
||||||
|
expect(books, hasLength(1));
|
||||||
|
|
||||||
|
var book = books.first;
|
||||||
|
print(book.toJson());
|
||||||
|
expect(book.id, deathlyHallows.id);
|
||||||
|
expect(book.name, deathlyHallows.name);
|
||||||
|
|
||||||
|
var author = book.author as Author;
|
||||||
|
print(author.toJson());
|
||||||
|
expect(author.id, rowling.id);
|
||||||
|
expect(author.name, rowling.name);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('select one', () async {
|
||||||
|
var query = new BookQuery();
|
||||||
|
query.where.id.equals(int.parse(deathlyHallows.id));
|
||||||
|
print(query.toSql());
|
||||||
|
|
||||||
|
var book = await BookQuery.getOne(
|
||||||
|
int.parse(deathlyHallows.id), connection);
|
||||||
|
print(book.toJson());
|
||||||
|
expect(book.id, deathlyHallows.id);
|
||||||
|
expect(book.name, deathlyHallows.name);
|
||||||
|
|
||||||
|
var author = book.author as Author;
|
||||||
|
print(author.toJson());
|
||||||
|
expect(author.id, rowling.id);
|
||||||
|
expect(author.name, rowling.name);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('where clause', () async {
|
||||||
|
var query = new BookQuery()
|
||||||
|
..where.name.equals('Goblet of Fire')
|
||||||
|
..or(new BookQueryWhere()..authorId.equals(int.parse(rowling.id)));
|
||||||
|
print(query.toSql());
|
||||||
|
|
||||||
|
var books = await query.get(connection).toList();
|
||||||
|
expect(books, hasLength(1));
|
||||||
|
|
||||||
|
var book = books.first;
|
||||||
|
print(book.toJson());
|
||||||
|
expect(book.id, deathlyHallows.id);
|
||||||
|
expect(book.name, deathlyHallows.name);
|
||||||
|
|
||||||
|
var author = book.author as Author;
|
||||||
|
print(author.toJson());
|
||||||
|
expect(author.id, rowling.id);
|
||||||
|
expect(author.name, rowling.name);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('union', () async {
|
||||||
|
var query1 = new BookQuery()
|
||||||
|
..where.name.like('Deathly%');
|
||||||
|
var query2 = new BookQuery()
|
||||||
|
..where.authorId.equals(-1);
|
||||||
|
var query3 = new BookQuery()
|
||||||
|
..where.name.isIn(['Goblet of Fire', 'Order of the Phoenix']);
|
||||||
|
query1
|
||||||
|
..union(query2)
|
||||||
|
..unionAll(query3);
|
||||||
|
print(query1.toSql());
|
||||||
|
|
||||||
|
var books = await query1.get(connection).toList();
|
||||||
|
expect(books, hasLength(1));
|
||||||
|
|
||||||
|
var book = books.first;
|
||||||
|
print(book.toJson());
|
||||||
|
expect(book.id, deathlyHallows.id);
|
||||||
|
expect(book.name, deathlyHallows.name);
|
||||||
|
|
||||||
|
var author = book.author as Author;
|
||||||
|
print(author.toJson());
|
||||||
|
expect(author.id, rowling.id);
|
||||||
|
expect(author.name, rowling.name);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('insert sets relationship', () {
|
||||||
|
expect(deathlyHallows.author, isNotNull);
|
||||||
|
expect((deathlyHallows.author as Author).name, rowling.name);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('delete stream', () async {
|
||||||
|
var query = new BookQuery()..where.name.equals(deathlyHallows.name);
|
||||||
|
print(query.toSql());
|
||||||
|
var books = await query.delete(connection).toList();
|
||||||
|
expect(books, hasLength(1));
|
||||||
|
|
||||||
|
var book = books.first;
|
||||||
|
expect(book.id, deathlyHallows.id);
|
||||||
|
expect(book.author, isNotNull);
|
||||||
|
expect((book.author as Author).name, rowling.name);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('update book', () async {
|
||||||
|
var cloned = deathlyHallows.clone()..name = 'Sorcerer\'s Stone';
|
||||||
|
var book = await BookQuery.updateBook(connection, cloned);
|
||||||
|
print(book.toJson());
|
||||||
|
expect(book.name, cloned.name);
|
||||||
|
expect(book.author, isNotNull);
|
||||||
|
expect((book.author as Author).name, rowling.name);
|
||||||
|
});
|
||||||
|
}
|
|
@@ -16,7 +16,7 @@ main() {
     var whereClause = query.where.toWhereClause();
     print('Where clause: $whereClause');
     expect(whereClause,
-        'WHERE "family_friendly" = TRUE AND "recalled_at" <= \'2000-01-01\'');
+        'WHERE cars.family_friendly = TRUE AND cars.recalled_at <= \'2000-01-01\'');
   });

   test('parseRow', () {
@@ -10,6 +10,10 @@ Future<PostgreSQLConnection> connectToPostgres() async {

   var query = await new File('test/models/car.up.g.sql').readAsString();
   await conn.execute(query);
+  query = await new File('test/models/author.up.g.sql').readAsString();
+  await conn.execute(query);
+  query = await new File('test/models/book.up.g.sql').readAsString();
+  await conn.execute(query);

   return conn;
 }
@ -35,12 +35,12 @@ class AuthorQuery {
|
||||||
|
|
||||||
void sortDescending(String key) {
|
void sortDescending(String key) {
|
||||||
_sortMode = 'Descending';
|
_sortMode = 'Descending';
|
||||||
_sortKey = key;
|
_sortKey = ('' + key);
|
||||||
}
|
}
|
||||||
|
|
||||||
void sortAscending(String key) {
|
void sortAscending(String key) {
|
||||||
_sortMode = 'Ascending';
|
_sortMode = 'Ascending';
|
||||||
_sortKey = key;
|
_sortKey = ('' + key);
|
||||||
}
|
}
|
||||||
|
|
||||||
void or(AuthorQueryWhere selector) {
|
void or(AuthorQueryWhere selector) {
|
||||||
|
@ -49,7 +49,10 @@ class AuthorQuery {
|
||||||
|
|
||||||
String toSql([String prefix]) {
|
String toSql([String prefix]) {
|
||||||
var buf = new StringBuffer();
|
var buf = new StringBuffer();
|
||||||
buf.write(prefix != null ? prefix : 'SELECT * FROM "authors"');
|
buf.write(prefix != null
|
||||||
|
? prefix
|
||||||
|
: 'SELECT id, name, created_at, updated_at FROM "authors"');
|
||||||
|
if (prefix == null) {}
|
||||||
var whereClause = where.toWhereClause();
|
var whereClause = where.toWhereClause();
|
||||||
if (whereClause != null) {
|
if (whereClause != null) {
|
||||||
buf.write(' ' + whereClause);
|
buf.write(' ' + whereClause);
|
||||||
|
@ -88,26 +91,33 @@ class AuthorQuery {
|
||||||
}
|
}
|
||||||
|
|
||||||
static Author parseRow(List row) {
|
static Author parseRow(List row) {
|
||||||
return new Author.fromJson({
|
var result = new Author.fromJson({
|
||||||
'id': row[0].toString(),
|
'id': row[0].toString(),
|
||||||
'name': row[1],
|
'name': row[1],
|
||||||
'created_at': row[2],
|
'created_at': row[2],
|
||||||
'updated_at': row[3]
|
'updated_at': row[3]
|
||||||
});
|
});
|
||||||
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
Stream<Author> get(PostgreSQLConnection connection) {
|
Stream<Author> get(PostgreSQLConnection connection) {
|
||||||
StreamController<Author> ctrl = new StreamController<Author>();
|
StreamController<Author> ctrl = new StreamController<Author>();
|
||||||
connection.query(toSql()).then((rows) {
|
connection.query(toSql()).then((rows) async {
|
||||||
rows.map(parseRow).forEach(ctrl.add);
|
var futures = rows.map((row) async {
|
||||||
|
var parsed = parseRow(row);
|
||||||
|
return parsed;
|
||||||
|
});
|
||||||
|
var output = await Future.wait(futures);
|
||||||
|
output.forEach(ctrl.add);
|
||||||
ctrl.close();
|
ctrl.close();
|
||||||
}).catchError(ctrl.addError);
|
}).catchError(ctrl.addError);
|
||||||
return ctrl.stream;
|
return ctrl.stream;
|
||||||
}
|
}
|
||||||
|
|
||||||
static Future<Author> getOne(int id, PostgreSQLConnection connection) {
|
static Future<Author> getOne(int id, PostgreSQLConnection connection) {
|
||||||
return connection.query('SELECT * FROM "authors" WHERE "id" = @id;',
|
var query = new AuthorQuery();
|
||||||
substitutionValues: {'id': id}).then((rows) => parseRow(rows.first));
|
query.where.id.equals(id);
|
||||||
|
return query.get(connection).first.catchError((_) => null);
|
||||||
}
|
}
|
||||||
|
|
||||||
Stream<Author> update(PostgreSQLConnection connection,
|
Stream<Author> update(PostgreSQLConnection connection,
|
||||||
|
@ -128,8 +138,13 @@ class AuthorQuery {
|
||||||
'name': name,
|
'name': name,
|
||||||
'createdAt': createdAt != null ? createdAt : __ormNow__,
|
'createdAt': createdAt != null ? createdAt : __ormNow__,
|
||||||
'updatedAt': updatedAt != null ? updatedAt : __ormNow__
|
'updatedAt': updatedAt != null ? updatedAt : __ormNow__
|
||||||
}).then((rows) {
|
}).then((rows) async {
|
||||||
rows.map(parseRow).forEach(ctrl.add);
|
var futures = rows.map((row) async {
|
||||||
|
var parsed = parseRow(row);
|
||||||
|
return parsed;
|
||||||
|
});
|
||||||
|
var output = await Future.wait(futures);
|
||||||
|
output.forEach(ctrl.add);
|
||||||
ctrl.close();
|
ctrl.close();
|
||||||
}).catchError(ctrl.addError);
|
}).catchError(ctrl.addError);
|
||||||
return ctrl.stream;
|
return ctrl.stream;
|
||||||
|
@ -140,8 +155,13 @@ class AuthorQuery {
|
||||||
connection
|
connection
|
||||||
.query(toSql('DELETE FROM "authors"') +
|
.query(toSql('DELETE FROM "authors"') +
|
||||||
' RETURNING "id", "name", "created_at", "updated_at";')
|
' RETURNING "id", "name", "created_at", "updated_at";')
|
||||||
.then((rows) {
|
.then((rows) async {
|
||||||
rows.map(parseRow).forEach(ctrl.add);
|
var futures = rows.map((row) async {
|
||||||
|
var parsed = parseRow(row);
|
||||||
|
return parsed;
|
||||||
|
});
|
||||||
|
var output = await Future.wait(futures);
|
||||||
|
output.forEach(ctrl.add);
|
||||||
ctrl.close();
|
ctrl.close();
|
||||||
}).catchError(ctrl.addError);
|
}).catchError(ctrl.addError);
|
||||||
return ctrl.stream;
|
return ctrl.stream;
|
||||||
|
@ -165,7 +185,8 @@ class AuthorQuery {
|
||||||
'createdAt': createdAt != null ? createdAt : __ormNow__,
|
'createdAt': createdAt != null ? createdAt : __ormNow__,
|
||||||
'updatedAt': updatedAt != null ? updatedAt : __ormNow__
|
'updatedAt': updatedAt != null ? updatedAt : __ormNow__
|
||||||
});
|
});
|
||||||
return parseRow(result[0]);
|
var output = parseRow(result[0]);
|
||||||
|
return output;
|
||||||
}
|
}
|
||||||
|
|
||||||
static Future<Author> insertAuthor(
|
static Future<Author> insertAuthor(
|
||||||
|
@ -199,18 +220,18 @@ class AuthorQueryWhere {
|
||||||
final StringSqlExpressionBuilder name = new StringSqlExpressionBuilder();
|
final StringSqlExpressionBuilder name = new StringSqlExpressionBuilder();
|
||||||
|
|
||||||
final DateTimeSqlExpressionBuilder createdAt =
|
final DateTimeSqlExpressionBuilder createdAt =
|
||||||
new DateTimeSqlExpressionBuilder('created_at');
|
new DateTimeSqlExpressionBuilder('authors.created_at');
|
||||||
|
|
||||||
final DateTimeSqlExpressionBuilder updatedAt =
|
final DateTimeSqlExpressionBuilder updatedAt =
|
||||||
new DateTimeSqlExpressionBuilder('updated_at');
|
new DateTimeSqlExpressionBuilder('authors.updated_at');
|
||||||
|
|
||||||
String toWhereClause({bool keyword}) {
|
String toWhereClause({bool keyword}) {
|
||||||
final List<String> expressions = [];
|
final List<String> expressions = [];
|
||||||
if (id.hasValue) {
|
if (id.hasValue) {
|
||||||
expressions.add('"id" ' + id.compile());
|
expressions.add('authors.id ' + id.compile());
|
||||||
}
|
}
|
||||||
if (name.hasValue) {
|
if (name.hasValue) {
|
||||||
expressions.add('"name" ' + name.compile());
|
expressions.add('authors.name ' + name.compile());
|
||||||
}
|
}
|
||||||
if (createdAt.hasValue) {
|
if (createdAt.hasValue) {
|
||||||
expressions.add(createdAt.compile());
|
expressions.add(createdAt.compile());
|
||||||
|
|
|
@@ -2,5 +2,6 @@ CREATE TEMPORARY TABLE "authors" (
   "id" serial,
   "name" varchar,
   "created_at" timestamp,
-  "updated_at" timestamp
+  "updated_at" timestamp,
+  PRIMARY KEY(id)
 );
@@ -1,4 +1,4 @@
-library angel_orm.test.models.author;
+library angel_orm.test.models.book;

 import 'package:angel_framework/common.dart';
 import 'package:angel_orm/angel_orm.dart';
@@ -1,6 +1,6 @@
 // GENERATED CODE - DO NOT MODIFY BY HAND

-part of angel_orm.test.models.author;
+part of angel_orm.test.models.book;

 // **************************************************************************
 // Generator: JsonModelGenerator
@ -36,12 +36,12 @@ class BookQuery {
|
||||||
|
|
||||||
void sortDescending(String key) {
|
void sortDescending(String key) {
|
||||||
_sortMode = 'Descending';
|
_sortMode = 'Descending';
|
||||||
_sortKey = key;
|
_sortKey = ('books.' + key);
|
||||||
}
|
}
|
||||||
|
|
||||||
void sortAscending(String key) {
|
void sortAscending(String key) {
|
||||||
_sortMode = 'Ascending';
|
_sortMode = 'Ascending';
|
||||||
_sortKey = key;
|
_sortKey = ('books.' + key);
|
||||||
}
|
}
|
||||||
|
|
||||||
void or(BookQueryWhere selector) {
|
void or(BookQueryWhere selector) {
|
||||||
|
@ -50,7 +50,12 @@ class BookQuery {
|
||||||
|
|
||||||
String toSql([String prefix]) {
|
String toSql([String prefix]) {
|
||||||
var buf = new StringBuffer();
|
var buf = new StringBuffer();
|
||||||
buf.write(prefix != null ? prefix : 'SELECT * FROM "books"');
|
buf.write(prefix != null
|
||||||
|
? prefix
|
||||||
|
: 'SELECT books.id, books.name, books.created_at, books.updated_at, books.author_id, authors.id, authors.name, authors.created_at, authors.updated_at FROM "books"');
|
||||||
|
if (prefix == null) {
|
||||||
|
buf.write(' INNER JOIN authors ON books.author_id = authors.id');
|
||||||
|
}
|
||||||
var whereClause = where.toWhereClause();
|
var whereClause = where.toWhereClause();
|
||||||
if (whereClause != null) {
|
if (whereClause != null) {
|
||||||
buf.write(' ' + whereClause);
|
buf.write(' ' + whereClause);
|
||||||
|
@@ -89,33 +94,44 @@ class BookQuery {
   }
 
   static Book parseRow(List row) {
-    return new Book.fromJson({
+    var result = new Book.fromJson({
       'id': row[0].toString(),
       'name': row[1],
       'created_at': row[2],
       'updated_at': row[3],
-      'author': row.length < 5 ? null : AuthorQuery.parseRow(row[4])
+      'author_id': row[4]
     });
+    if (row.length > 5) {
+      result.author = AuthorQuery.parseRow([row[5], row[6], row[7], row[8]]);
+    }
+    return result;
   }
 
   Stream<Book> get(PostgreSQLConnection connection) {
     StreamController<Book> ctrl = new StreamController<Book>();
-    connection.query(toSql()).then((rows) {
-      rows.map(parseRow).forEach(ctrl.add);
+    connection.query(toSql()).then((rows) async {
+      var futures = rows.map((row) async {
+        var parsed = parseRow(row);
+        parsed.author = await AuthorQuery.getOne(row[4], connection);
+        return parsed;
+      });
+      var output = await Future.wait(futures);
+      output.forEach(ctrl.add);
       ctrl.close();
     }).catchError(ctrl.addError);
     return ctrl.stream;
   }
 
   static Future<Book> getOne(int id, PostgreSQLConnection connection) {
-    return connection.query('SELECT * FROM "books" WHERE "id" = @id;',
-        substitutionValues: {'id': id}).then((rows) => parseRow(rows.first));
+    var query = new BookQuery();
+    query.where.id.equals(id);
+    return query.get(connection).first.catchError((_) => null);
   }
 
   Stream<Book> update(PostgreSQLConnection connection,
-      {String name, DateTime createdAt, DateTime updatedAt}) {
+      {String name, DateTime createdAt, DateTime updatedAt, int authorId}) {
     var buf = new StringBuffer(
-        'UPDATE "books" SET ("name", "created_at", "updated_at") = (@name, @createdAt, @updatedAt) ');
+        'UPDATE "books" SET ("name", "created_at", "updated_at", "author_id") = (@name, @createdAt, @updatedAt, @authorId) ');
     var whereClause = where.toWhereClause();
     if (whereClause == null) {
       buf.write('WHERE "id" = @id');
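To make the new row layout concrete, here is a hedged sketch of how parseRow now treats a joined result row: columns 0-4 are the books columns, and columns 5-8, when present, are handed to AuthorQuery.parseRow and attached as the author. The sample values are invented for illustration.

    // Sketch only; the row values are made up, and AuthorQuery comes from the
    // generated author.orm.g.dart (not shown in this diff).
    void main() {
      var now = new DateTime.now();
      var row = [
        1, 'A Tale of Two Cities', now, now, 2, // books columns (0-4)
        2, 'Charles Dickens', now, now // joined authors columns (5-8)
      ];
      var book = BookQuery.parseRow(row);
      print(book.name); // A Tale of Two Cities
      print(book.author.name); // Charles Dickens
    }

Note also that getOne() now routes through get(), so a lookup for a missing id resolves to null instead of surfacing an error.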
@@ -125,13 +141,21 @@ class BookQuery {
     var __ormNow__ = new DateTime.now();
     var ctrl = new StreamController<Book>();
     connection.query(
-        buf.toString() + ' RETURNING "id", "name", "created_at", "updated_at";',
+        buf.toString() +
+            ' RETURNING "id", "name", "created_at", "updated_at", "author_id";',
         substitutionValues: {
           'name': name,
           'createdAt': createdAt != null ? createdAt : __ormNow__,
-          'updatedAt': updatedAt != null ? updatedAt : __ormNow__
-        }).then((rows) {
-      rows.map(parseRow).forEach(ctrl.add);
+          'updatedAt': updatedAt != null ? updatedAt : __ormNow__,
+          'authorId': authorId
+        }).then((rows) async {
+      var futures = rows.map((row) async {
+        var parsed = parseRow(row);
+        parsed.author = await AuthorQuery.getOne(row[4], connection);
+        return parsed;
+      });
+      var output = await Future.wait(futures);
+      output.forEach(ctrl.add);
       ctrl.close();
     }).catchError(ctrl.addError);
     return ctrl.stream;
@@ -141,9 +165,15 @@ class BookQuery {
     StreamController<Book> ctrl = new StreamController<Book>();
     connection
         .query(toSql('DELETE FROM "books"') +
-            ' RETURNING "id", "name", "created_at", "updated_at";')
-        .then((rows) {
-      rows.map(parseRow).forEach(ctrl.add);
+            ' RETURNING "id", "name", "created_at", "updated_at", "author_id";')
+        .then((rows) async {
+      var futures = rows.map((row) async {
+        var parsed = parseRow(row);
+        parsed.author = await AuthorQuery.getOne(row[4], connection);
+        return parsed;
+      });
+      var output = await Future.wait(futures);
+      output.forEach(ctrl.add);
       ctrl.close();
     }).catchError(ctrl.addError);
     return ctrl.stream;
@@ -151,27 +181,37 @@ class BookQuery {
 
   static Future<Book> deleteOne(int id, PostgreSQLConnection connection) async {
     var result = await connection.query(
-        'DELETE FROM "books" WHERE id = @id RETURNING "id", "name", "created_at", "updated_at";',
+        'DELETE FROM "books" WHERE id = @id RETURNING "id", "name", "created_at", "updated_at", "author_id";',
         substitutionValues: {'id': id});
     return parseRow(result[0]);
   }
 
   static Future<Book> insert(PostgreSQLConnection connection,
-      {String name, DateTime createdAt, DateTime updatedAt}) async {
+      {String name,
+      DateTime createdAt,
+      DateTime updatedAt,
+      int authorId}) async {
     var __ormNow__ = new DateTime.now();
     var result = await connection.query(
-        'INSERT INTO "books" ("name", "created_at", "updated_at") VALUES (@name, @createdAt, @updatedAt) RETURNING "id", "name", "created_at", "updated_at";',
+        'INSERT INTO "books" ("name", "created_at", "updated_at", "author_id") VALUES (@name, @createdAt, @updatedAt, @authorId) RETURNING "id", "name", "created_at", "updated_at", "author_id";',
         substitutionValues: {
           'name': name,
           'createdAt': createdAt != null ? createdAt : __ormNow__,
-          'updatedAt': updatedAt != null ? updatedAt : __ormNow__
+          'updatedAt': updatedAt != null ? updatedAt : __ormNow__,
+          'authorId': authorId
         });
-    return parseRow(result[0]);
+    var output = parseRow(result[0]);
+    output.author = await AuthorQuery.getOne(result[0][4], connection);
+    return output;
   }
 
-  static Future<Book> insertBook(PostgreSQLConnection connection, Book book) {
+  static Future<Book> insertBook(PostgreSQLConnection connection, Book book,
+      {int authorId}) {
     return BookQuery.insert(connection,
-        name: book.name, createdAt: book.createdAt, updatedAt: book.updatedAt);
+        name: book.name,
+        createdAt: book.createdAt,
+        updatedAt: book.updatedAt,
+        authorId: authorId);
   }
 
   static Future<Book> updateBook(PostgreSQLConnection connection, Book book) {
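A hedged usage sketch of the new insert path: insert() and insertBook() now take an authorId, and the returned Book comes back with its author populated via AuthorQuery.getOne. The connection and an already-saved Author are assumed to exist (e.g. from the test suite); Author.id is a String in these generated models, hence the int.parse.

    // Sketch only: 'connection' and 'author' are assumed to be set up elsewhere.
    Future<Book> addBook(PostgreSQLConnection connection, Author author) async {
      var book = await BookQuery.insert(connection,
          name: 'A Tale of Two Cities', authorId: int.parse(author.id));
      print(book.id);
      print(book.author.name); // filled in from the referenced author row
      return book;
    }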
@@ -181,7 +221,8 @@ class BookQuery {
         .update(connection,
             name: book.name,
             createdAt: book.createdAt,
-            updatedAt: book.updatedAt)
+            updatedAt: book.updatedAt,
+            authorId: int.parse(book.author.id))
         .first;
   }
 
@@ -196,18 +237,21 @@ class BookQueryWhere {
   final StringSqlExpressionBuilder name = new StringSqlExpressionBuilder();
 
   final DateTimeSqlExpressionBuilder createdAt =
-      new DateTimeSqlExpressionBuilder('created_at');
+      new DateTimeSqlExpressionBuilder('books.created_at');
 
   final DateTimeSqlExpressionBuilder updatedAt =
-      new DateTimeSqlExpressionBuilder('updated_at');
+      new DateTimeSqlExpressionBuilder('books.updated_at');
 
+  final NumericSqlExpressionBuilder<int> authorId =
+      new NumericSqlExpressionBuilder<int>();
+
   String toWhereClause({bool keyword}) {
     final List<String> expressions = [];
     if (id.hasValue) {
-      expressions.add('"id" ' + id.compile());
+      expressions.add('books.id ' + id.compile());
     }
     if (name.hasValue) {
-      expressions.add('"name" ' + name.compile());
+      expressions.add('books.name ' + name.compile());
     }
     if (createdAt.hasValue) {
       expressions.add(createdAt.compile());
@@ -215,6 +259,9 @@ class BookQueryWhere {
     if (updatedAt.hasValue) {
       expressions.add(updatedAt.compile());
     }
+    if (authorId.hasValue) {
+      expressions.add('books.author_id ' + authorId.compile());
+    }
     return expressions.isEmpty
         ? null
         : ((keyword != false ? 'WHERE ' : '') + expressions.join(' AND '));
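With the table-qualified column names above, the where clause composes cleanly with the joined select. A hedged sketch of filtering on the new authorId builder:

    // Sketch only.
    Stream<Book> booksByAuthor(PostgreSQLConnection connection, int authorId) {
      var query = new BookQuery();
      query.where.authorId.equals(authorId);
      // toWhereClause() now yields roughly: WHERE books.author_id = <value>
      return query.get(connection);
    }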
@@ -2,5 +2,7 @@ CREATE TEMPORARY TABLE "books" (
   "id" serial,
   "name" varchar,
   "created_at" timestamp,
-  "updated_at" timestamp
+  "updated_at" timestamp,
+  "author_id" int REFERENCES authors(id) ON DELETE CASCADE,
+  PRIMARY KEY(id)
 );
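The new author_id column gives the temporary books table a real foreign key, so deleting an author also removes that author's books. A hedged sketch of the effect, using the same plain postgres connection the tests already hold (the table contents and the execute/query calls below are assumptions for illustration):

    // Sketch only: assumes both temporary tables exist on this connection and
    // that an author with id 1 was inserted earlier.
    Future demoCascade(PostgreSQLConnection connection) async {
      await connection.execute('DELETE FROM authors WHERE id = @id',
          substitutionValues: {'id': 1});
      var rows = await connection.query(
          'SELECT COUNT(*) FROM books WHERE author_id = @id',
          substitutionValues: {'id': 1});
      print(rows.first.first); // 0 - the referencing books were cascaded away
    }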
@@ -35,12 +35,12 @@ class CarQuery {
 
   void sortDescending(String key) {
     _sortMode = 'Descending';
-    _sortKey = key;
+    _sortKey = ('' + key);
   }
 
   void sortAscending(String key) {
     _sortMode = 'Ascending';
-    _sortKey = key;
+    _sortKey = ('' + key);
   }
 
   void or(CarQueryWhere selector) {
@@ -49,7 +49,10 @@ class CarQuery {
 
   String toSql([String prefix]) {
     var buf = new StringBuffer();
-    buf.write(prefix != null ? prefix : 'SELECT * FROM "cars"');
+    buf.write(prefix != null
+        ? prefix
+        : 'SELECT id, make, description, family_friendly, recalled_at, created_at, updated_at FROM "cars"');
+    if (prefix == null) {}
     var whereClause = where.toWhereClause();
     if (whereClause != null) {
       buf.write(' ' + whereClause);
@@ -88,7 +91,7 @@ class CarQuery {
   }
 
   static Car parseRow(List row) {
-    return new Car.fromJson({
+    var result = new Car.fromJson({
       'id': row[0].toString(),
       'make': row[1],
       'description': row[2],
@@ -97,20 +100,27 @@ class CarQuery {
       'created_at': row[5],
       'updated_at': row[6]
     });
+    return result;
   }
 
   Stream<Car> get(PostgreSQLConnection connection) {
     StreamController<Car> ctrl = new StreamController<Car>();
-    connection.query(toSql()).then((rows) {
-      rows.map(parseRow).forEach(ctrl.add);
+    connection.query(toSql()).then((rows) async {
+      var futures = rows.map((row) async {
+        var parsed = parseRow(row);
+        return parsed;
+      });
+      var output = await Future.wait(futures);
+      output.forEach(ctrl.add);
       ctrl.close();
     }).catchError(ctrl.addError);
     return ctrl.stream;
   }
 
   static Future<Car> getOne(int id, PostgreSQLConnection connection) {
-    return connection.query('SELECT * FROM "cars" WHERE "id" = @id;',
-        substitutionValues: {'id': id}).then((rows) => parseRow(rows.first));
+    var query = new CarQuery();
+    query.where.id.equals(id);
+    return query.get(connection).first.catchError((_) => null);
   }
 
   Stream<Car> update(PostgreSQLConnection connection,
@@ -140,8 +150,13 @@ class CarQuery {
           'recalledAt': recalledAt,
           'createdAt': createdAt != null ? createdAt : __ormNow__,
           'updatedAt': updatedAt != null ? updatedAt : __ormNow__
-        }).then((rows) {
-      rows.map(parseRow).forEach(ctrl.add);
+        }).then((rows) async {
+      var futures = rows.map((row) async {
+        var parsed = parseRow(row);
+        return parsed;
+      });
+      var output = await Future.wait(futures);
+      output.forEach(ctrl.add);
       ctrl.close();
     }).catchError(ctrl.addError);
     return ctrl.stream;
@@ -152,8 +167,13 @@ class CarQuery {
     connection
         .query(toSql('DELETE FROM "cars"') +
             ' RETURNING "id", "make", "description", "family_friendly", "recalled_at", "created_at", "updated_at";')
-        .then((rows) {
-      rows.map(parseRow).forEach(ctrl.add);
+        .then((rows) async {
+      var futures = rows.map((row) async {
+        var parsed = parseRow(row);
+        return parsed;
+      });
+      var output = await Future.wait(futures);
+      output.forEach(ctrl.add);
       ctrl.close();
     }).catchError(ctrl.addError);
     return ctrl.stream;
@@ -184,7 +204,8 @@ class CarQuery {
           'createdAt': createdAt != null ? createdAt : __ormNow__,
           'updatedAt': updatedAt != null ? updatedAt : __ormNow__
         });
-    return parseRow(result[0]);
+    var output = parseRow(result[0]);
+    return output;
   }
 
   static Future<Car> insertCar(PostgreSQLConnection connection, Car car) {
@@ -228,27 +249,27 @@ class CarQueryWhere {
       new BooleanSqlExpressionBuilder();
 
   final DateTimeSqlExpressionBuilder recalledAt =
-      new DateTimeSqlExpressionBuilder('recalled_at');
+      new DateTimeSqlExpressionBuilder('cars.recalled_at');
 
   final DateTimeSqlExpressionBuilder createdAt =
-      new DateTimeSqlExpressionBuilder('created_at');
+      new DateTimeSqlExpressionBuilder('cars.created_at');
 
   final DateTimeSqlExpressionBuilder updatedAt =
-      new DateTimeSqlExpressionBuilder('updated_at');
+      new DateTimeSqlExpressionBuilder('cars.updated_at');
 
   String toWhereClause({bool keyword}) {
     final List<String> expressions = [];
     if (id.hasValue) {
-      expressions.add('"id" ' + id.compile());
+      expressions.add('cars.id ' + id.compile());
     }
     if (make.hasValue) {
-      expressions.add('"make" ' + make.compile());
+      expressions.add('cars.make ' + make.compile());
     }
    if (description.hasValue) {
-      expressions.add('"description" ' + description.compile());
+      expressions.add('cars.description ' + description.compile());
     }
     if (familyFriendly.hasValue) {
-      expressions.add('"family_friendly" ' + familyFriendly.compile());
+      expressions.add('cars.family_friendly ' + familyFriendly.compile());
     }
     if (recalledAt.hasValue) {
       expressions.add(recalledAt.compile());
@@ -5,5 +5,6 @@ CREATE TEMPORARY TABLE "cars" (
   "family_friendly" boolean,
   "recalled_at" timestamp,
   "created_at" timestamp,
-  "updated_at" timestamp
+  "updated_at" timestamp,
+  PRIMARY KEY(id)
 );
@@ -3,16 +3,28 @@ import 'package:source_gen/source_gen.dart';
 import 'package:angel_orm_generator/angel_orm_generator.dart';
 import 'package:angel_serialize_generator/angel_serialize_generator.dart';
 
-final InputSet MODELS =
+final InputSet ALL_MODELS =
     new InputSet('angel_orm_generator', const ['test/models/*.dart']);
+final InputSet STANDALONE_MODELS = new InputSet('angel_orm_generator',
+    const ['test/models/car.dart', 'test/models/author.dart']);
+final InputSet DEPENDENT_MODELS =
+    new InputSet('angel_orm_generator', const ['test/models/book.dart']);
 
 final PhaseGroup PHASES = new PhaseGroup()
   ..addPhase(new Phase()
-    ..addAction(new GeneratorBuilder([const JsonModelGenerator()]), MODELS))
+    ..addAction(
+        new GeneratorBuilder([const JsonModelGenerator()]), STANDALONE_MODELS)
+    ..addAction(
+        new GeneratorBuilder([const JsonModelGenerator()]), DEPENDENT_MODELS))
   ..addPhase(new Phase()
     ..addAction(
         new GeneratorBuilder([new PostgresORMGenerator()],
            isStandalone: true, generatedExtension: '.orm.g.dart'),
-        MODELS))
+        STANDALONE_MODELS))
   ..addPhase(new Phase()
-    ..addAction(new SQLMigrationGenerator(temporary: true), MODELS));
+    ..addAction(
+        new GeneratorBuilder([new PostgresORMGenerator()],
+            isStandalone: true, generatedExtension: '.orm.g.dart'),
+        DEPENDENT_MODELS))
+  ..addPhase(new Phase()
+    ..addAction(new SQLMigrationGenerator(temporary: true), ALL_MODELS));
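The build configuration now orders the phases so the relationship can resolve: the JSON models build first, the Postgres ORM classes for car and author build next, book's ORM class builds in a later phase so it can reference the generated author query, and the SQL migrations run last over ALL_MODELS. A hedged sketch of a script that would consume PHASES, assuming the pre-1.0 build_runner API that these PhaseGroup/InputSet classes belong to:

    // tool/build.dart (hypothetical path) - sketch only; the exact signature of
    // build() depends on the old build_runner version this package pins.
    import 'package:build_runner/build_runner.dart';
    import 'phases.dart';

    main() => build(PHASES, deleteFilesByDefault: true);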