parent abf438196e
commit ed9b675ac3
48 changed files with 4155 additions and 150 deletions
@@ -23,10 +23,10 @@ dev_dependencies:
 `package:angel_orm_generator` exports two classes that you can include
 in a `package:build` flow:
-* `PostgreORMGenerator` - Fueled by `package:source_gen`; include this within a `GeneratorBuilder`.
-* `SQLMigrationGenerator` - This is its own `Builder`; it generates a SQL schema, as well as a SQL script to drop a generated table.
+* `PostgresOrmGenerator` - Fueled by `package:source_gen`; include this within a `LibraryBuilder`.
+* `SqlMigrationBuilder` - This is its own `Builder`; it generates a SQL schema, as well as a SQL script to drop a generated table.

-You should pass an `InputSet` containing your project's models.
+You should pass a `List<String>` containing your project's models.

 # Models

 Your model, courtesy of `package:angel_serialize`:
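For orientation, a minimal sketch of how these two classes might be wired into a `package:build` script. The `tool/build.dart` path, the `example_app` package name, the model glob, and the `BuildAction`-based `build_runner` API shown here are assumptions for illustration from that era's tooling, not part of this commit:

```dart
// tool/build.dart — hypothetical wiring; adjust the package name and globs.
import 'package:build_runner/build_runner.dart';
import 'package:source_gen/source_gen.dart';
import 'package:angel_orm_generator/angel_orm_generator.dart';

Future main() async {
  // The List<String> of model libraries handed to each builder.
  const models = const ['lib/src/models/*.dart'];

  await build([
    // PostgresOrmGenerator rides inside a source_gen LibraryBuilder.
    new BuildAction(
        new LibraryBuilder(const PostgresOrmGenerator(),
            generatedExtension: '.orm.g.dart'),
        'example_app',
        inputs: models),
    // SqlMigrationBuilder is itself a Builder, so it is used directly.
    new BuildAction(const SqlMigrationBuilder(), 'example_app',
        inputs: models),
  ]);
}
```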
angel_orm_generator/lib/src/builder/orm/build_context.dart (184 additions, new file)
@@ -0,0 +1,184 @@
import 'dart:async';
import 'package:analyzer/dart/constant/value.dart';
import 'package:analyzer/dart/element/element.dart';
import 'package:analyzer/dart/element/type.dart';
import 'package:analyzer/src/dart/element/element.dart';
import 'package:angel_orm/angel_orm.dart';
import 'package:angel_serialize_generator/build_context.dart' as serialize;
import 'package:angel_serialize_generator/context.dart' as serialize;
import 'package:build/build.dart';
import 'package:inflection/inflection.dart';
import 'package:recase/recase.dart';
import 'package:source_gen/source_gen.dart';
import 'postgres_build_context.dart';

const TypeChecker columnTypeChecker = const TypeChecker.fromRuntime(Column),
    dateTimeTypeChecker = const TypeChecker.fromRuntime(DateTime),
    ormTypeChecker = const TypeChecker.fromRuntime(ORM),
    relationshipTypeChecker = const TypeChecker.fromRuntime(Relationship);

const TypeChecker hasOneTypeChecker = const TypeChecker.fromRuntime(HasOne),
    hasManyTypeChecker = const TypeChecker.fromRuntime(HasMany),
    belongsToTypeChecker = const TypeChecker.fromRuntime(BelongsTo),
    belongsToManyTypeChecker = const TypeChecker.fromRuntime(BelongsToMany);

ColumnType inferColumnType(DartType type) {
  if (const TypeChecker.fromRuntime(String).isAssignableFromType(type))
    return ColumnType.VAR_CHAR;
  if (const TypeChecker.fromRuntime(int).isAssignableFromType(type))
    return ColumnType.INT;
  if (const TypeChecker.fromRuntime(double).isAssignableFromType(type))
    return ColumnType.DECIMAL;
  if (const TypeChecker.fromRuntime(num).isAssignableFromType(type))
    return ColumnType.NUMERIC;
  if (const TypeChecker.fromRuntime(bool).isAssignableFromType(type))
    return ColumnType.BOOLEAN;
  if (const TypeChecker.fromRuntime(DateTime).isAssignableFromType(type))
    return ColumnType.TIME_STAMP;
  return null;
}

Column reviveColumn(ConstantReader cr) {
  // TODO: Get index type, column type...
  var args = cr.revive().namedArguments;
  IndexType indexType = IndexType.NONE;
  ColumnType columnType;

  if (args.containsKey('index')) {
    indexType = IndexType.values[args['index'].getField('index').toIntValue()];
  }

  if (args.containsKey('type')) {
    columnType = new _ColumnType(args['type'].getField('name').toStringValue());
  }

  return new Column(
    nullable: cr.peek('nullable')?.boolValue,
    length: cr.peek('length')?.intValue,
    defaultValue: cr.peek('defaultValue')?.literalValue,
    type: columnType,
    index: indexType,
  );
}

ORM reviveOrm(ConstantReader cr) {
  return new ORM(cr.peek('tableName')?.stringValue);
}

Relationship reviveRelationship(DartObject relationshipAnnotation) {
  var cr = new ConstantReader(relationshipAnnotation);
  var r = cr.revive().namedArguments;
  int type = -1;

  if (cr.instanceOf(hasOneTypeChecker))
    type = RelationshipType.HAS_ONE;
  else if (cr.instanceOf(hasManyTypeChecker))
    type = RelationshipType.HAS_MANY;
  else if (cr.instanceOf(belongsToTypeChecker))
    type = RelationshipType.BELONGS_TO;
  else if (cr.instanceOf(belongsToManyTypeChecker))
    type = RelationshipType.BELONGS_TO_MANY;
  else
    throw new UnsupportedError(
        'Unsupported relationship type "${relationshipAnnotation.type.name}".');

  return new Relationship(type,
      localKey: r['localKey']?.toStringValue(),
      foreignKey: r['foreignKey']?.toStringValue(),
      foreignTable: r['foreignTable']?.toStringValue(),
      cascadeOnDelete: r['cascadeOnDelete']?.toBoolValue());
}

Future<PostgresBuildContext> buildContext(
    ClassElement clazz,
    ORM annotation,
    BuildStep buildStep,
    Resolver resolver,
    bool autoSnakeCaseNames,
    bool autoIdAndDateFields) async {
  var raw = await serialize.buildContext(clazz, null, buildStep, resolver,
      autoSnakeCaseNames != false, autoIdAndDateFields != false);
  var ctx = await PostgresBuildContext.create(
      raw, annotation, resolver, buildStep,
      tableName: (annotation.tableName?.isNotEmpty == true)
          ? annotation.tableName
          : pluralize(new ReCase(clazz.name).snakeCase),
      autoSnakeCaseNames: autoSnakeCaseNames != false,
      autoIdAndDateFields: autoIdAndDateFields != false);
  List<String> fieldNames = [];
  List<FieldElement> fields = [];

  for (var field in raw.fields) {
    fieldNames.add(field.name);
    // Check for relationship. If so, skip.
    var relationshipAnnotation =
        relationshipTypeChecker.firstAnnotationOf(field);

    if (relationshipAnnotation != null) {
      ctx.relationshipFields.add(field);
      ctx.relationships[field.name] =
          reviveRelationship(relationshipAnnotation);
      continue;
    }

    // Check for column annotation...
    Column column;
    var columnAnnotation = columnTypeChecker.firstAnnotationOf(field);

    if (columnAnnotation != null) {
      column = reviveColumn(new ConstantReader(columnAnnotation));
    }

    if (column == null && field.name == 'id' && ctx.shimmed['id'] == true) {
      column = const Column(type: ColumnType.SERIAL);
    }

    if (column == null) {
      // Guess what kind of column this is...
      column = new Column(
        type: inferColumnType(
          field.type,
        ),
      );
    }

    if (column?.type == null)
      throw 'Cannot infer SQL column type for field "${field.name}" with type "${field.type.name}".';
    ctx.columnInfo[field.name] = column;
    fields.add(field);
  }

  ctx.fields.addAll(fields);

  // Add belongs to fields
  // TODO: Do this for belongs to many as well
  ctx.relationships.forEach((name, r) {
    var relationship = ctx.populateRelationship(name);
    var rc = new ReCase(relationship.localKey);

    if (relationship.type == RelationshipType.BELONGS_TO) {
      ctx.fields.removeWhere((f) => f.name == rc.camelCase);
      var field = new RelationshipConstraintField(
          rc.camelCase, ctx.typeProvider.intType, name);
      ctx.fields.add(field);
      ctx.aliases[field.name] = relationship.localKey;
    }
  });

  return ctx;
}

class RelationshipConstraintField extends FieldElementImpl {
  @override
  final DartType type;
  final String originalName;
  RelationshipConstraintField(String name, this.type, this.originalName)
      : super(name, -1);
}

class _ColumnType implements ColumnType {
  @override
  final String name;

  _ColumnType(this.name);
}
angel_orm_generator/lib/src/builder/orm/migration.dart (165 additions, new file)
@@ -0,0 +1,165 @@
import 'dart:async';
import 'package:analyzer/dart/ast/ast.dart';
import 'package:analyzer/dart/element/element.dart';
import 'package:angel_orm/angel_orm.dart';
import 'package:build/build.dart';
import 'build_context.dart';
import 'package:source_gen/source_gen.dart';
import 'postgres_build_context.dart';

class SqlMigrationBuilder implements Builder {
  /// If `true` (default), then field names will automatically be (de)serialized as snake_case.
  final bool autoSnakeCaseNames;

  /// If `true` (default), then the schema will automatically add id, created_at and updated_at fields.
  final bool autoIdAndDateFields;

  /// If `true` (default: `false`), then the resulting schema will generate a `TEMPORARY` table.
  final bool temporary;

  const SqlMigrationBuilder(
      {this.autoSnakeCaseNames: true,
      this.autoIdAndDateFields: true,
      this.temporary: false});

  @override
  Map<String, List<String>> get buildExtensions => {
        '.dart': ['.up.g.sql', '.down.g.sql']
      };

  @override
  Future build(BuildStep buildStep) async {
    var resolver = await buildStep.resolver;
    var up = new StringBuffer();
    var down = new StringBuffer();

    if (!await resolver.isLibrary(buildStep.inputId)) {
      return;
    }

    var lib = await resolver.libraryFor(buildStep.inputId);
    var elements = lib.definingCompilationUnit.unit.declarations;

    if (!elements.any(
        (el) => ormTypeChecker.firstAnnotationOf(el.element) != null)) return;

    await generateSqlMigrations(lib, resolver, buildStep, up, down);
    buildStep.writeAsString(
        buildStep.inputId.changeExtension('.up.g.sql'), up.toString());
    buildStep.writeAsString(
        buildStep.inputId.changeExtension('.down.g.sql'), down.toString());
  }

  Future generateSqlMigrations(LibraryElement libraryElement, Resolver resolver,
      BuildStep buildStep, StringBuffer up, StringBuffer down) async {
    List<String> done = [];
    for (var element
        in libraryElement.definingCompilationUnit.unit.declarations) {
      if (element is ClassDeclaration && !done.contains(element.name)) {
        var ann = ormTypeChecker.firstAnnotationOf(element.element);
        if (ann != null) {
          var ctx = await buildContext(
              element.element,
              reviveOrm(new ConstantReader(ann)),
              buildStep,
              resolver,
              autoSnakeCaseNames != false,
              autoIdAndDateFields != false);
          buildUpMigration(ctx, up);
          buildDownMigration(ctx, down);
          done.add(element.name.name);
        }
      }
    }
  }

  void buildUpMigration(PostgresBuildContext ctx, StringBuffer buf) {
    if (temporary == true)
      buf.writeln('CREATE TEMPORARY TABLE "${ctx.tableName}" (');
    else
      buf.writeln('CREATE TABLE "${ctx.tableName}" (');

    List<String> dup = [];
    int i = 0;
    ctx.columnInfo.forEach((name, col) {
      var key = ctx.resolveFieldName(name);

      if (dup.contains(key))
        return;
      else {
        if (key != 'id' || autoIdAndDateFields == false) {
          // Check for relationships that might duplicate
          for (var rName in ctx.relationships.keys) {
            var relationship = ctx.populateRelationship(rName);
            if (relationship.localKey == key) return;
          }
        }

        dup.add(key);
        if (i++ > 0) buf.writeln(',');
      }

      buf.write(' "$key" ${col.type.name}');

      if (col.index == IndexType.PRIMARY_KEY)
        buf.write(' PRIMARY KEY');
      else if (col.index == IndexType.UNIQUE) buf.write(' UNIQUE');

      if (col.nullable != true) buf.write(' NOT NULLABLE');
    });

    // Relations
    ctx.relationships.forEach((name, r) {
      var relationship = ctx.populateRelationship(name);

      if (relationship.isBelongsTo) {
        var key = relationship.localKey;

        if (dup.contains(key))
          return;
        else {
          dup.add(key);
          if (i++ > 0) buf.writeln(',');
        }

        buf.write(
            ' "${relationship.localKey}" int REFERENCES ${relationship.foreignTable}(${relationship.foreignKey})');
        if (relationship.cascadeOnDelete != false && relationship.isSingular)
          buf.write(' ON DELETE CASCADE');
      }
    });

    // Primary keys, unique
    bool hasPrimary = false;
    ctx.fields.forEach((f) {
      var col = ctx.columnInfo[f.name];
      if (col != null) {
        var name = ctx.resolveFieldName(f.name);
        if (col.index == IndexType.UNIQUE) {
          if (i++ > 0) buf.writeln(',');
          buf.write(' UNIQUE($name)');
        } else if (col.index == IndexType.PRIMARY_KEY) {
          if (i++ > 0) buf.writeln(',');
          hasPrimary = true;
          buf.write(' PRIMARY KEY($name)');
        }
      }
    });

    if (!hasPrimary) {
      var idField =
          ctx.fields.firstWhere((f) => f.name == 'id', orElse: () => null);
      if (idField != null) {
        if (i++ > 0) buf.writeln(',');
        buf.write(' PRIMARY KEY(id)');
      }
    }

    buf.writeln();
    buf.writeln(');');
  }

  void buildDownMigration(PostgresBuildContext ctx, StringBuffer buf) {
    buf.writeln('DROP TABLE "${ctx.tableName}";');
  }
}
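As a rough illustration of what this builder emits: per `buildExtensions`, each annotated model library produces an `.up.g.sql` and a `.down.g.sql` asset next to it. The `Todo` model below and the exact column type names are hypothetical; the real names come from `ColumnType` in `package:angel_orm`.

```dart
// For a hypothetical lib/src/models/todo.dart with an @ORM() Todo class,
// the builder writes two sibling assets:
//   lib/src/models/todo.up.g.sql   — roughly: CREATE TABLE "todos" ( ... PRIMARY KEY(id) );
//   lib/src/models/todo.down.g.sql — roughly: DROP TABLE "todos";
// Passing temporary: true switches the up script to CREATE TEMPORARY TABLE.
const migration = const SqlMigrationBuilder(temporary: true);
```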
angel_orm_generator/lib/src/builder/orm/postgres.dart (969 additions, new file)
@@ -0,0 +1,969 @@
import 'dart:async';
import 'package:analyzer/dart/ast/ast.dart';
import 'package:analyzer/dart/element/element.dart';
import 'package:angel_orm/angel_orm.dart';
import 'package:build/build.dart';
import 'package:code_builder/dart/async.dart';
import 'package:code_builder/dart/core.dart';
import 'package:code_builder/code_builder.dart';
import 'package:path/path.dart' as p;
import 'package:recase/recase.dart';
import 'package:source_gen/source_gen.dart' hide LibraryBuilder;
import 'build_context.dart';
import 'postgres_build_context.dart';

const List<String> RELATIONS = const ['or'];
const List<String> RESTRICTORS = const ['limit', 'offset'];
const Map<String, String> SORT_MODES = const {
  'Descending': 'DESC',
  'Ascending': 'ASC'
};

// TODO: HasOne, HasMany
class PostgresOrmGenerator extends GeneratorForAnnotation<ORM> {
  /// If `true` (default), then field names will automatically be (de)serialized as snake_case.
  final bool autoSnakeCaseNames;

  /// If `true` (default), then `id`, `createdAt` and `updatedAt` fields are automatically added.
  final bool autoIdAndDateFields;

  const PostgresOrmGenerator(
      {this.autoSnakeCaseNames: true, this.autoIdAndDateFields: true});

  @override
  Future<String> generateForAnnotatedElement(
      Element element, ConstantReader annotation, BuildStep buildStep) async {
    if (buildStep.inputId.path.contains('.orm.g.dart')) {
      return null;
    }

    if (element is! ClassElement)
      throw 'Only classes can be annotated with @ORM().';
    var resolver = await buildStep.resolver;
    var lib = await generateOrmLibrary(element.library, resolver, buildStep)
        .then((l) => l.buildAst());
    if (lib == null) return null;
    return prettyToSource(lib);
  }

  Future<LibraryBuilder> generateOrmLibrary(LibraryElement libraryElement,
      Resolver resolver, BuildStep buildStep) async {
    var lib = new LibraryBuilder();
    lib.addDirective(new ImportBuilder('dart:async'));
    lib.addDirective(new ImportBuilder('package:angel_orm/angel_orm.dart'));
    lib.addDirective(new ImportBuilder('package:postgres/postgres.dart'));
    lib.addDirective(new ImportBuilder(p.basename(buildStep.inputId.path)));
    var elements = libraryElement.definingCompilationUnit.unit.declarations
        .where((el) => el is ClassDeclaration);
    Map<ClassElement, PostgresBuildContext> contexts = {};
    List<String> done = [];
    List<String> imported = [];

    for (ClassDeclaration element in elements) {
      if (!done.contains(element.name)) {
        var ann = ormTypeChecker.firstAnnotationOf(element.element);
        if (ann != null) {
          var ctx = contexts[element.element] = await buildContext(
              element.element,
              reviveOrm(new ConstantReader(ann)),
              buildStep,
              resolver,
              autoSnakeCaseNames != false,
              autoIdAndDateFields != false);
          ctx.relationships.forEach((name, r) {
            var relationship = ctx.populateRelationship(name);
            var field = ctx.resolveRelationshipField(name);
            var uri = field.type.element.source.uri;
            var pathName = p
                .basenameWithoutExtension(p.basenameWithoutExtension(uri.path));
            var source =
                '$pathName.orm.g.dart'; //uri.resolve('$pathName.orm.g.dart').toString();
            // TODO: Find good way to source url...
            source = new ReCase(relationship.isList
                        ? relationship.modelType.name
                        : field.type.name)
                    .snakeCase +
                '.orm.g.dart';

            if (!imported.contains(source)) {
              lib.addDirective(new ImportBuilder(source));
              imported.add(source);
            }
          });
        }
      }
    }

    if (contexts.isEmpty) return null;

    done.clear();
    for (var element in contexts.keys) {
      if (!done.contains(element.name)) {
        var ctx = contexts[element];
        lib.addMember(await buildQueryClass(ctx));
        lib.addMember(buildWhereClass(ctx));
        done.add(element.name);
      }
    }
    return lib;
  }

  Future<ClassBuilder> buildQueryClass(PostgresBuildContext ctx) async {
    var clazz = new ClassBuilder(ctx.queryClassName);

    // Add constructor + field
    var connection = reference('connection');

    // Add _unions
    clazz.addField(varFinal('_unions',
        value: map({}),
        type: new TypeBuilder('Map',
            genericTypes: [ctx.queryClassBuilder, lib$core.bool])));

    var unions = <String, bool>{'union': false, 'unionAll': true};
    unions.forEach((name, all) {
      var meth = new MethodBuilder(name, returnType: lib$core.$void);
      meth.addPositional(parameter('query', [ctx.queryClassBuilder]));
      meth.addStatement(
          literal(all).asAssign(reference('_unions')[reference('query')]));
      clazz.addMethod(meth);
    });

    // Add _sortMode
    clazz.addField(varField('_sortKey', type: lib$core.String));
    clazz.addField(varField('_sortMode', type: lib$core.String));

    SORT_MODES.keys.forEach((sort) {
      var m = new MethodBuilder('sort$sort', returnType: lib$core.$void);
      m.addPositional(parameter('key', [lib$core.String]));
      m.addStatement(literal(sort).asAssign(reference('_sortMode')));
      m.addStatement((literal(ctx.prefix) + reference('key'))
          .parentheses()
          .asAssign(reference('_sortKey')));
      clazz.addMethod(m);
    });

    // Add limit, offset
    for (var restrictor in RESTRICTORS) {
      clazz.addField(varField(restrictor, type: lib$core.int));
    }

    // Add and, or, not
    for (var relation in RELATIONS) {
      clazz.addField(varFinal('_$relation',
          type: new TypeBuilder('List', genericTypes: [ctx.whereClassBuilder]),
          value: list([])));
      var relationMethod =
          new MethodBuilder(relation, returnType: lib$core.$void);
      relationMethod
          .addPositional(parameter('selector', [ctx.whereClassBuilder]));
      relationMethod.addStatement(
          reference('_$relation').invoke('add', [reference('selector')]));
      clazz.addMethod(relationMethod);
    }

    // Add _buildSelectQuery()

    // Add where...
    clazz.addField(varFinal('where',
        type: new TypeBuilder(ctx.whereClassName),
        value: new TypeBuilder(ctx.whereClassName).newInstance([])));

    // Add toSql()...
    clazz.addMethod(await buildToSqlMethod(ctx));

    // Add parseRow()...
    clazz.addMethod(await buildParseRowMethod(ctx), asStatic: true);

    // Add get()...
    clazz.addMethod(buildGetMethod(ctx));

    // Add getOne()...
    clazz.addMethod(buildGetOneMethod(ctx), asStatic: true);

    // Add update()...
    clazz.addMethod(buildUpdateMethod(ctx));

    // Add delete()...
    clazz.addMethod(buildDeleteMethod(ctx));

    // Add deleteOne()...
    clazz.addMethod(buildDeleteOneMethod(ctx), asStatic: true);

    // Add insert()...
    clazz.addMethod(buildInsertMethod(ctx), asStatic: true);

    // Add insertX()
    clazz.addMethod(buildInsertModelMethod(ctx), asStatic: true);

    // Add updateX()
    clazz.addMethod(buildUpdateModelMethod(ctx), asStatic: true);

    // Add getAll() => new TodoQuery().get();
    clazz.addMethod(
        new MethodBuilder('getAll',
            returnType: new TypeBuilder('Stream',
                genericTypes: [ctx.modelClassBuilder]),
            returns: ctx.queryClassBuilder
                .newInstance([]).invoke('get', [connection]))
          ..addPositional(
              parameter('connection', [ctx.postgreSQLConnectionBuilder])),
        asStatic: true);

    return clazz;
  }

  Future<String> computeSelector(PostgresBuildContext ctx) async {
    var buf = new StringBuffer();
    int i = 0;

    // Add all regular fields
    ctx.fields.forEach((f) {
      if (i++ > 0) buf.write(', ');
      var name = ctx.resolveFieldName(f.name);
      buf.write(ctx.prefix + "$name");
    });

    // Add all relationship fields...
    for (var name in ctx.relationships.keys) {
      // Should only run when a JOIN is performed, i.e. singular
      var relationship = ctx.populateRelationship(name);

      if (relationship.isSingular) {
        var modelTypeContext = await relationship.modelTypeContext;
        modelTypeContext.fields.forEach((f) {
          if (i++ > 0) buf.write(', ');
          var name = modelTypeContext.resolveFieldName(f.name);
          buf.write('${relationship.foreignTable}.$name');
        });
      }
    }

    return buf.toString();
  }

  Future<MethodBuilder> buildToSqlMethod(PostgresBuildContext ctx) async {
    var meth = new MethodBuilder('toSql', returnType: lib$core.String);
    meth.addPositional(parameter('prefix', [lib$core.String]).asOptional());
    var buf = reference('buf');
    meth.addStatement(
        varField('buf', value: lib$core.StringBuffer.newInstance([])));

    // Write prefix, or default to SELECT
    var prefix = reference('prefix');
    meth.addStatement(buf.invoke('write', [
      prefix.notEquals(literal(null)).ternary(prefix,
          literal('SELECT ${await computeSelector(ctx)} FROM "${ctx.tableName}"'))
    ]));

    var relationsIfThen = ifThen(prefix.equals(literal(null)));

    // Apply relationships
    ctx.relationships.forEach((name, r) {
      var relationship = ctx.populateRelationship(name);

      if (relationship.isSingular) {
        String b = ' LEFT OUTER JOIN ${relationship.foreignTable} ON ${ctx
            .tableName}.${relationship.localKey} = ${relationship
            .foreignTable}.${relationship.foreignKey}';
        relationsIfThen.addStatement(buf.invoke('write', [literal(b)]));
      }

      // A join-based solution won't work for hasMany and co.
      /*else {
        String b = ' LEFT OUTER JOIN ${relationship.foreignTable} ON ${ctx
            .tableName}.${relationship.localKey} = ${relationship
            .foreignTable}.${relationship.foreignKey}';
        relationsIfThen.addStatement(buf.invoke('write', [literal(b)]));
      }*/
    });

    meth.addStatement(relationsIfThen);

    meth.addStatement(varField('whereClause',
        value: reference('where').invoke('toWhereClause', [])));

    var whereClause = reference('whereClause');

    meth.addStatement(ifThen(whereClause.notEquals(literal(null)), [
      buf.invoke('write', [literal(' ') + whereClause])
    ]));

    for (var relation in RELATIONS) {
      var ref = reference('_$relation'),
          x = reference('x'),
          whereClause = reference('whereClause');
      var upper = relation.toUpperCase();
      var closure = new MethodBuilder.closure();
      closure.addPositional(parameter('x'));
      closure.addStatement(varField('whereClause',
          value: x.invoke('toWhereClause', [],
              namedArguments: {'keyword': literal(false)})));
      closure.addStatement(ifThen(whereClause.notEquals(literal(null)), [
        buf.invoke('write', [literal(' $upper (') + whereClause + literal(')')])
      ]));

      meth.addStatement(ref.invoke('forEach', [closure]));
    }

    var ifNoPrefix = ifThen(reference('prefix').equals(literal(null)));

    for (var restrictor in RESTRICTORS) {
      var ref = reference(restrictor);
      var upper = restrictor.toUpperCase();
      ifNoPrefix.addStatement(ifThen(ref.notEquals(literal(null)), [
        buf.invoke('write', [literal(' $upper ') + ref.invoke('toString', [])])
      ]));
    }

    var sortMode = reference('_sortMode');

    SORT_MODES.forEach((k, sort) {
      ifNoPrefix.addStatement(ifThen(sortMode.equals(literal(k)), [
        buf.invoke('write', [
          literal(' ORDER BY "') + reference('_sortKey') + literal('" $sort')
        ])
      ]));
    });

    // Add unions
    var unionClosure = new MethodBuilder.closure();
    unionClosure.addPositional(parameter('query'));
    unionClosure.addPositional(parameter('all'));
    unionClosure.addStatement(buf.invoke('write', [literal(' UNION')]));
    unionClosure.addStatement(ifThen(reference('all'), [
      buf.invoke('write', [literal(' ALL')])
    ]));
    unionClosure.addStatement(buf.invoke('write', [literal(' (')]));
    unionClosure.addStatement(varField('sql',
        value: reference('query').invoke('toSql', []).invoke(
            'replaceAll', [literal(';'), literal('')])));
    unionClosure
        .addStatement(buf.invoke('write', [reference('sql') + literal(')')]));

    ifNoPrefix
        .addStatement(reference('_unions').invoke('forEach', [unionClosure]));

    ifNoPrefix.addStatement(buf.invoke('write', [literal(';')]));

    meth.addStatement(ifNoPrefix);
    meth.addStatement(buf.invoke('toString', []).asReturn());
    return meth;
  }

  Future<MethodBuilder> buildParseRowMethod(PostgresBuildContext ctx) async {
    var meth = new MethodBuilder('parseRow', returnType: ctx.modelClassBuilder);
    meth.addPositional(parameter('row', [lib$core.List]));
    //meth.addStatement(lib$core.print.call(
    //    [literal('ROW MAP: ') + reference('row').invoke('toString', [])]));
    var row = reference('row');

    // We want to create a Map using the SQL row.
    Map<String, ExpressionBuilder> data = {};

    int i = 0;

    ctx.fields.forEach((field) {
      var name = ctx.resolveFieldName(field.name);
      var rowKey = row[literal(i++)];

      if (field.name == 'id' && ctx.shimmed.containsKey('id')) {
        data[name] = rowKey.invoke('toString', []);
      } else
        data[name] = rowKey;
    });

    // Invoke fromJson()
    var result = reference('result');
    meth.addStatement(varField('result',
        value: ctx.modelClassBuilder
            .newInstance([map(data)], constructor: 'fromJson')));

    // For each relationship, try to parse
    for (var name in ctx.relationships.keys) {
      int minIndex = i;

      var relationship = ctx.populateRelationship(name);
      var modelTypeContext = await relationship.modelTypeContext;
      var rc = new ReCase(relationship.isList
          ? relationship.modelType.name
          : relationship.dartType.name);
      var relationshipQuery = new TypeBuilder('${rc.pascalCase}Query');
      List<ExpressionBuilder> relationshipRow = [];

      modelTypeContext.fields.forEach((f) {
        relationshipRow.add(row[literal(i++)]);
      });

      meth.addStatement(ifThen(row.property('length') > literal(minIndex), [
        relationshipQuery.invoke(
            'parseRow', [list(relationshipRow)]).asAssign(result.property(name))
      ]));
    }

    // Then, call a .fromJson() constructor
    meth.addStatement(result.asReturn());

    return meth;
  }

  void _invokeStreamClosure(
      PostgresBuildContext ctx, ExpressionBuilder future, MethodBuilder meth) {
    var ctrl = reference('ctrl');
    // Invoke query...
    var catchError = ctrl.property('addError');
    var then = new MethodBuilder.closure(modifier: MethodModifier.asAsync)
      ..addPositional(parameter('rows'));

    var forEachClosure =
        new MethodBuilder.closure(modifier: MethodModifier.asAsync);
    forEachClosure.addPositional(parameter('row'));
    forEachClosure.addStatement(varField('parsed',
        value: reference('parseRow').call([reference('row')])));
    _applyRelationshipsToOutput(
        ctx, reference('parsed'), reference('row'), forEachClosure);
    forEachClosure.addStatement(reference('parsed').asReturn());

    then.addStatement(varField('futures',
        value: reference('rows').invoke('map', [forEachClosure])));
    then.addStatement(varField('output',
        value:
            lib$async.Future.invoke('wait', [reference('futures')]).asAwait()));
    then.addStatement(
        reference('output').invoke('forEach', [ctrl.property('add')]));

    then.addStatement(ctrl.invoke('close', []));
    meth.addStatement(
        future.invoke('then', [then]).invoke('catchError', [catchError]));
    meth.addStatement(ctrl.property('stream').asReturn());
  }

  MethodBuilder buildGetMethod(PostgresBuildContext ctx) {
    var meth = new MethodBuilder('get',
        returnType:
            new TypeBuilder('Stream', genericTypes: [ctx.modelClassBuilder]));
    meth.addPositional(
        parameter('connection', [ctx.postgreSQLConnectionBuilder]));
    var streamController = new TypeBuilder('StreamController',
        genericTypes: [ctx.modelClassBuilder]);
    meth.addStatement(varField('ctrl',
        type: streamController, value: streamController.newInstance([])));

    var future =
        reference('connection').invoke('query', [reference('toSql').call([])]);
    _invokeStreamClosure(ctx, future, meth);
    return meth;
  }

  MethodBuilder buildGetOneMethod(PostgresBuildContext ctx) {
    var meth = new MethodBuilder('getOne',
        returnType:
            new TypeBuilder('Future', genericTypes: [ctx.modelClassBuilder]));
    meth.addPositional(parameter('id', [lib$core.int]));
    meth.addPositional(
        parameter('connection', [ctx.postgreSQLConnectionBuilder]));

    var query = reference('query'),
        whereId = query.property('where').property('id');
    meth.addStatement(
        varField('query', value: ctx.queryClassBuilder.newInstance([])));
    meth.addStatement(whereId.invoke('equals', [reference('id')]));

    // Return null on error
    var catchErr = new MethodBuilder.closure(returns: literal(null));
    catchErr.addPositional(parameter('_'));

    meth.addStatement(query
        .invoke('get', [reference('connection')])
        .property('first')
        .invoke('catchError', [catchErr])
        .asReturn());

    return meth;
  }

  void _addAllNamed(MethodBuilder meth, PostgresBuildContext ctx) {
    // Add all named params
    ctx.fields.forEach((field) {
      if (field.name != 'id') {
        var p = new ParameterBuilder(field.name,
            type: new TypeBuilder(field.type.name));
        var column = ctx.columnInfo[field.name];
        if (column?.defaultValue != null)
          p = p.asOptional(literal(column.defaultValue));
        meth.addNamed(p);
      }
    });
  }

  void _addReturning(StringBuffer buf, PostgresBuildContext ctx) {
    buf.write(' RETURNING ');
    int i = 0;
    ctx.fields.forEach((field) {
      if (i++ > 0) buf.write(', ');
      var name = ctx.resolveFieldName(field.name);
      buf.write('"$name"');
    });

    buf.write(';');
  }

  void _ensureDates(MethodBuilder meth, PostgresBuildContext ctx) {
    if (ctx.fields.any((f) => f.name == 'createdAt' || f.name == 'updatedAt')) {
      meth.addStatement(varField('__ormNow__',
          value: lib$core.DateTime.newInstance([], constructor: 'now')));
    }
  }

  Map<String, ExpressionBuilder> _buildSubstitutionValues(
      PostgresBuildContext ctx) {
    Map<String, ExpressionBuilder> substitutionValues = {};
    ctx.fields.forEach((field) {
      if (field.name == 'id')
        return;
      else if (field.name == 'createdAt' || field.name == 'updatedAt') {
        var ref = reference(field.name);
        substitutionValues[field.name] =
            ref.notEquals(literal(null)).ternary(ref, reference('__ormNow__'));
      } else
        substitutionValues[field.name] = reference(field.name);
    });
    return substitutionValues;
  }

  ExpressionBuilder _executeQuery(ExpressionBuilder queryString,
      MethodBuilder meth, Map<String, ExpressionBuilder> substitutionValues) {
    var connection = reference('connection');
    var query = queryString;
    return connection.invoke('query', [query],
        namedArguments: {'substitutionValues': map(substitutionValues)});
  }

  MethodBuilder buildUpdateMethod(PostgresBuildContext ctx) {
    var meth = new MethodBuilder('update',
        returnType:
            new TypeBuilder('Stream', genericTypes: [ctx.modelClassBuilder]));
    meth.addPositional(
        parameter('connection', [ctx.postgreSQLConnectionBuilder]));
    _addAllNamed(meth, ctx);

    var buf = new StringBuffer('UPDATE "${ctx.tableName}" SET (');
    int i = 0;
    ctx.fields.forEach((field) {
      if (field.name == 'id')
        return;
      else {
        if (i++ > 0) buf.write(', ');
        var key = ctx.resolveFieldName(field.name);
        buf.write('"$key"');
      }
    });
    buf.write(') = (');
    i = 0;
    ctx.fields.forEach((field) {
      if (field.name == 'id')
        return;
      else {
        if (i++ > 0) buf.write(', ');
        buf.write('@${field.name}');
      }
    });
    buf.write(') ');

    var $buf = reference('buf');
    var whereClause = reference('whereClause');
    meth.addStatement(varField('buf',
        value: lib$core.StringBuffer.newInstance([literal(buf.toString())])));
    meth.addStatement(varField('whereClause',
        value: reference('where').invoke('toWhereClause', [])));

    meth.addStatement(ifThen(whereClause.notEquals(literal(null)), [
      $buf.invoke('write', [whereClause])
    ]));

    var buf2 = new StringBuffer();
    _addReturning(buf2, ctx);
    _ensureDates(meth, ctx);
    var substitutionValues = _buildSubstitutionValues(ctx);

    var ctrlType = new TypeBuilder('StreamController',
        genericTypes: [ctx.modelClassBuilder]);
    meth.addStatement(varField('ctrl', value: ctrlType.newInstance([])));
    var result = _executeQuery(
        $buf.invoke('toString', []) + literal(buf2.toString()),
        meth,
        substitutionValues);
    _invokeStreamClosure(ctx, result, meth);
    return meth;
  }

  MethodBuilder buildDeleteMethod(PostgresBuildContext ctx) {
    var meth = new MethodBuilder('delete',
        returnType:
            new TypeBuilder('Stream', genericTypes: [ctx.modelClassBuilder]));
    meth.addPositional(
        parameter('connection', [ctx.postgreSQLConnectionBuilder]));

    var litBuf = new StringBuffer();
    _addReturning(litBuf, ctx);

    var streamController = new TypeBuilder('StreamController',
        genericTypes: [ctx.modelClassBuilder]);
    meth.addStatement(varField('ctrl',
        type: streamController, value: streamController.newInstance([])));

    var future = reference('connection').invoke('query', [
      reference('toSql').call([literal('DELETE FROM "${ctx.tableName}"')]) +
          literal(litBuf.toString())
    ]);
    _invokeStreamClosure(ctx, future, meth);

    return meth;
  }

  MethodBuilder buildDeleteOneMethod(PostgresBuildContext ctx) {
    var meth = new MethodBuilder('deleteOne',
        returnType:
            new TypeBuilder('Future', genericTypes: [ctx.modelClassBuilder]))
      ..addPositional(parameter('id', [lib$core.int]))
      ..addPositional(
          parameter('connection', [ctx.postgreSQLConnectionBuilder]));

    var id = reference('id'),
        connection = reference('connection'),
        query = reference('query');
    meth.addStatement(
        varField('query', value: ctx.queryClassBuilder.newInstance([])));
    meth.addStatement(
        query.property('where').property('id').invoke('equals', [id]));
    meth.addStatement(
        query.invoke('delete', [connection]).property('first').asReturn());
    return meth;
  }

  MethodBuilder buildInsertMethod(PostgresBuildContext ctx) {
    var meth = new MethodBuilder('insert',
        modifier: MethodModifier.asAsync,
        returnType:
            new TypeBuilder('Future', genericTypes: [ctx.modelClassBuilder]));
    meth.addPositional(
        parameter('connection', [ctx.postgreSQLConnectionBuilder]));

    // Add all named params
    _addAllNamed(meth, ctx);

    var buf = new StringBuffer('INSERT INTO "${ctx.tableName}" (');
    int i = 0;
    ctx.fields.forEach((field) {
      if (field.name == 'id')
        return;
      else {
        if (i++ > 0) buf.write(', ');
        var key = ctx.resolveFieldName(field.name);
        buf.write('"$key"');
      }
    });

    buf.write(') VALUES (');
    i = 0;
    ctx.fields.forEach((field) {
      if (field.name == 'id')
        return;
      else {
        if (i++ > 0) buf.write(', ');
        buf.write('@${field.name}');
      }
    });

    buf.write(')');
    // meth.addStatement(lib$core.print.call([literal(buf.toString())]));

    _addReturning(buf, ctx);
    _ensureDates(meth, ctx);

    var substitutionValues = _buildSubstitutionValues(ctx);

    var connection = reference('connection');
    var query = literal(buf.toString());
    var result = reference('result'), output = reference('output');
    meth.addStatement(varField('result',
        value: connection.invoke('query', [
          query
        ], namedArguments: {
          'substitutionValues': map(substitutionValues)
        }).asAwait()));

    meth.addStatement(varField('output',
        value: reference('parseRow').call([result[literal(0)]])));

    _applyRelationshipsToOutput(ctx, output, result[literal(0)], meth);

    meth.addStatement(output.asReturn());
    return meth;
  }

  void _applyRelationshipsToOutput(PostgresBuildContext ctx,
      ExpressionBuilder output, ExpressionBuilder row, MethodBuilder meth) {
    // Every relationship should fill itself in with a query
    ctx.relationships.forEach((name, r) {
      var relationship = ctx.populateRelationship(name);

      var rc = new ReCase(relationship.isList
          ? relationship.modelType.name
          : relationship.dartType.name);
      var type = new TypeBuilder('${rc.pascalCase}Query');

      // Resolve index within row...
      bool matched = false;
      int col = 0;
      for (var field in ctx.fields) {
        if (field is RelationshipConstraintField &&
            field.originalName == name) {
          matched = true;
          break;
        } else
          col++;
      }

      if (!matched) {
        matched = ctx.resolveRelationshipField(name) != null;
      }

      if (!matched)
        throw 'Couldn\'t resolve row index for relationship "${name}".';

      var idAsInt = row[literal(col)];

      if (relationship.isSingular) {
        if (relationship.isBelongsTo) {
          meth.addStatement(type
              .invoke('getOne', [idAsInt, reference('connection')])
              .asAwait()
              .asAssign(output.property(name)));
        } else {
          var query = reference('${rc.camelCase}Query');
          meth.addStatement(
              varField('${rc.camelCase}Query', value: type.newInstance([])));
          // Set id to row[0]
          meth.addStatement(query
              .property('where')
              .property('id')
              .invoke('equals', [row[literal(0)]]));
          var fetched = query
              .invoke('get', [reference('connection')])
              .property('first')
              .invoke('catchError', [
                new MethodBuilder.closure(returns: literal(null))
                  ..addPositional(parameter('_'))
              ])
              .asAwait();
          meth.addStatement(fetched.asAssign(output.property(name)));
        }
      } else {
        var query = reference('${rc.camelCase}Query');
        ExpressionBuilder fetched;

        if (relationship.isBelongsTo) {
          meth.addStatement(
              varField('${rc.camelCase}Query', value: type.newInstance([])));

          meth.addStatement(query
              .property('where')
              .property('id')
              .invoke('equals', [idAsInt]));
          fetched = query.invoke('get', [reference('connection')]).invoke(
              'toList', []).asAwait();
          meth.addStatement(output.property(name).invoke('addAll', [fetched]));
        } else {
          var query = reference('${rc.camelCase}Query');
          meth.addStatement(
              varField('${rc.camelCase}Query', value: type.newInstance([])));
          // Compute correct `xId` field via foreignKey
          var idCase = new ReCase(relationship.foreignKey);

          // Set id to row[0]
          meth.addStatement(query
              .property('where')
              .property(idCase.camelCase)
              .invoke('equals', [row[literal(0)]]));
          fetched = query.invoke('get', [reference('connection')]).invoke(
              'toList', []).invoke('catchError', [
            new MethodBuilder.closure(returns: list([]))
              ..addPositional(parameter('_'))
          ]).asAwait();
          meth.addStatement(fetched.asAssign(output.property(name)));
        }
      }
    });
  }

  void _addRelationshipConstraintsNamed(
      MethodBuilder m, PostgresBuildContext ctx) {
    ctx.relationships.forEach((name, r) {
      var relationship = ctx.populateRelationship(name);

      if (relationship.isBelongsTo) {
        var rc = new ReCase(relationship.localKey);
        m.addNamed(parameter(rc.camelCase, [lib$core.int]));
      }
    });
  }

  MethodBuilder buildInsertModelMethod(PostgresBuildContext ctx) {
    var rc = new ReCase(ctx.modelClassName);
    var meth = new MethodBuilder('insert${rc.pascalCase}',
        returnType:
            new TypeBuilder('Future', genericTypes: [ctx.modelClassBuilder]));

    meth.addPositional(
        parameter('connection', [ctx.postgreSQLConnectionBuilder]));
    meth.addPositional(parameter(rc.camelCase, [ctx.modelClassBuilder]));
    _addRelationshipConstraintsNamed(meth, ctx);

    Map<String, ExpressionBuilder> args = {};
    var ref = reference(rc.camelCase);

    ctx.fields.forEach((f) {
      if (f.name != 'id') {
        args[f.name] = f is RelationshipConstraintField
            ? reference(f.name)
            : ref.property(f.name);
      }
    });

    meth.addStatement(ctx.queryClassBuilder
        .invoke('insert', [reference('connection')], namedArguments: args)
        .asReturn());

    return meth;
  }

  MethodBuilder buildUpdateModelMethod(PostgresBuildContext ctx) {
    var rc = new ReCase(ctx.modelClassName);
    var meth = new MethodBuilder('update${rc.pascalCase}',
        returnType:
            new TypeBuilder('Future', genericTypes: [ctx.modelClassBuilder]));

    meth.addPositional(
        parameter('connection', [ctx.postgreSQLConnectionBuilder]));
    meth.addPositional(parameter(rc.camelCase, [ctx.modelClassBuilder]));

    // var query = new XQuery();
    var ref = reference(rc.camelCase);
    var query = reference('query');
    meth.addStatement(
        varField('query', value: ctx.queryClassBuilder.newInstance([])));

    // query.where.id.equals(x.id);
    meth.addStatement(query.property('where').property('id').invoke('equals', [
      lib$core.int.invoke('parse', [ref.property('id')])
    ]));

    // return query.update(connection, ...).first;
    Map<String, ExpressionBuilder> args = {};
    ctx.fields.forEach((f) {
      if (f.name != 'id') {
        if (f is RelationshipConstraintField) {
          // Need to int.parse the related id and pass it
          var relation = ref.property(f.originalName);
          var relationship = ctx.populateRelationship(f.originalName);
          args[f.name] = lib$core.int
              .invoke('parse', [relation.property(relationship.foreignKey)]);
        } else
          args[f.name] = ref.property(f.name);
      }
    });

    var update =
        query.invoke('update', [reference('connection')], namedArguments: args);
    meth.addStatement(update.property('first').asReturn());

    return meth;
  }

  ClassBuilder buildWhereClass(PostgresBuildContext ctx) {
    var clazz = new ClassBuilder(ctx.whereClassName);

    ctx.fields.forEach((field) {
      TypeBuilder queryBuilderType;
      List<ExpressionBuilder> args = [];

      if (field.name == 'id') {
        queryBuilderType = new TypeBuilder('NumericSqlExpressionBuilder',
            genericTypes: [lib$core.int]);
      } else {
        switch (field.type.name) {
          case 'String':
            queryBuilderType = new TypeBuilder('StringSqlExpressionBuilder');
            break;
          case 'int':
            queryBuilderType = new TypeBuilder('NumericSqlExpressionBuilder',
                genericTypes: [lib$core.int]);
            break;
          case 'double':
            queryBuilderType = new TypeBuilder('NumericSqlExpressionBuilder',
                genericTypes: [new TypeBuilder('double')]);
            break;
          case 'num':
            queryBuilderType = new TypeBuilder('NumericSqlExpressionBuilder');
            break;
          case 'bool':
            queryBuilderType = new TypeBuilder('BooleanSqlExpressionBuilder');
            break;
          case 'DateTime':
            queryBuilderType = new TypeBuilder('DateTimeSqlExpressionBuilder');
            args.add(literal(
                ctx.tableName + '.' + ctx.resolveFieldName(field.name)));
            break;
        }
      }

      if (queryBuilderType == null)
        throw 'Could not resolve query builder type for field "${field
            .name}" of type "${field.type.name}".';
      clazz.addField(varFinal(field.name,
          type: queryBuilderType, value: queryBuilderType.newInstance(args)));
    });

    // Create "toWhereClause()"
    var toWhereClause =
        new MethodBuilder('toWhereClause', returnType: lib$core.String);
    toWhereClause.addNamed(parameter('keyword', [lib$core.bool]));

    // List<String> expressions = [];
    toWhereClause.addStatement(varFinal('expressions',
        type: new TypeBuilder('List', genericTypes: [lib$core.String]),
        value: list([])));
    var expressions = reference('expressions');

    // Add all expressions...
    ctx.fields.forEach((field) {
      var name = ctx.resolveFieldName(field.name);
      var queryBuilder = reference(field.name);
      var toAdd = dateTimeTypeChecker.isAssignableFromType(field.type)
          ? queryBuilder.invoke('compile', [])
          : (literal('${ctx.tableName}.$name ') +
              queryBuilder.invoke('compile', []));

      toWhereClause.addStatement(ifThen(queryBuilder.property('hasValue'), [
        expressions.invoke('add', [toAdd])
      ]));
    });

    var kw = reference('keyword')
        .notEquals(literal(false))
        .ternary(literal('WHERE '), literal(''))
        .parentheses();

    // return expressions.isEmpty ? null : ('WHERE ' + expressions.join(' AND '));
    toWhereClause.addStatement(expressions
        .property('isEmpty')
        .ternary(literal(null),
            (kw + expressions.invoke('join', [literal(' AND ')])).parentheses())
        .asReturn());

    clazz.addMethod(toWhereClause);

    return clazz;
  }
}
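To make the generated API concrete, a sketch of how the emitted query class might be used. The `Todo`/`TodoQuery` names and the `created_at` key are hypothetical stand-ins for whatever model is annotated; the connection type comes from `package:postgres`:

```dart
import 'package:postgres/postgres.dart';

Future<Todo> demo(PostgreSQLConnection connection) async {
  // TodoQuery/TodoQueryWhere are what PostgresOrmGenerator would emit for a Todo model.
  var query = new TodoQuery();
  query.where.id.equals(1); // each field gets a SQL expression builder
  query.sortDescending('created_at');
  query.limit = 10;

  // get() streams parsed rows; getOne() is a static convenience lookup.
  var first = await query.get(connection).first;
  var same = await TodoQuery.getOne(1, connection);
  return same ?? first;
}
```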
@@ -0,0 +1,275 @@
import 'dart:async';
import 'package:analyzer/dart/constant/value.dart';
import 'package:analyzer/dart/element/element.dart';
import 'package:analyzer/dart/element/type.dart';
import 'package:analyzer/src/generated/resolver.dart';
import 'package:angel_orm/angel_orm.dart';
import 'package:angel_serialize_generator/context.dart';
import 'package:build/build.dart';
import 'package:code_builder/code_builder.dart';
import 'package:inflection/inflection.dart';
import 'package:recase/recase.dart';
import 'package:source_gen/source_gen.dart';
import 'build_context.dart';

class PostgresBuildContext extends BuildContext {
  LibraryElement _libraryCache;
  TypeProvider _typeProviderCache;
  TypeBuilder _modelClassBuilder,
      _queryClassBuilder,
      _whereClassBuilder,
      _postgresqlConnectionBuilder;
  String _prefix;
  final Map<String, Relationship> _populatedRelationships = {};
  final Map<String, Column> columnInfo = {};
  final Map<String, IndexType> indices = {};
  final Map<String, Relationship> relationships = {};
  final bool autoSnakeCaseNames, autoIdAndDateFields;
  final String tableName;
  final ORM ormAnnotation;
  final BuildContext raw;
  final Resolver resolver;
  final BuildStep buildStep;
  String primaryKeyName = 'id';

  PostgresBuildContext._(
      this.raw, this.ormAnnotation, this.resolver, this.buildStep,
      {this.tableName, this.autoSnakeCaseNames, this.autoIdAndDateFields})
      : super(raw.annotation,
            originalClassName: raw.originalClassName,
            sourceFilename: raw.sourceFilename);

  static Future<PostgresBuildContext> create(BuildContext raw,
      ORM ormAnnotation, Resolver resolver, BuildStep buildStep,
      {String tableName,
      bool autoSnakeCaseNames,
      bool autoIdAndDateFields}) async {
    var ctx = new PostgresBuildContext._(
      raw,
      ormAnnotation,
      resolver,
      buildStep,
      tableName: tableName,
      autoSnakeCaseNames: autoSnakeCaseNames,
      autoIdAndDateFields: autoIdAndDateFields,
    );

    // Library
    ctx._libraryCache = await resolver.libraryFor(buildStep.inputId);

    return ctx;
  }

  final List<FieldElement> fields = [], relationshipFields = [];

  TypeBuilder get modelClassBuilder =>
      _modelClassBuilder ??= new TypeBuilder(modelClassName);

  TypeBuilder get queryClassBuilder =>
      _queryClassBuilder ??= new TypeBuilder(queryClassName);

  TypeBuilder get whereClassBuilder =>
      _whereClassBuilder ??= new TypeBuilder(whereClassName);

  TypeBuilder get postgreSQLConnectionBuilder =>
      _postgresqlConnectionBuilder ??= new TypeBuilder('PostgreSQLConnection');

  String get prefix {
    if (_prefix != null) return _prefix;
    if (relationships.isEmpty)
      return _prefix = '';
    else
      return _prefix = tableName + '.';
  }

  Map<String, String> get aliases => raw.aliases;

  Map<String, bool> get shimmed => raw.shimmed;

  String get sourceFilename => raw.sourceFilename;

  String get modelClassName => raw.modelClassName;

  String get originalClassName => raw.originalClassName;

  String get queryClassName => modelClassName + 'Query';
  String get whereClassName => queryClassName + 'Where';

  LibraryElement get library => _libraryCache;

  TypeProvider get typeProvider =>
      _typeProviderCache ??= library.context.typeProvider;

  FieldElement resolveRelationshipField(String name) =>
      relationshipFields.firstWhere((f) => f.name == name, orElse: () => null);

  PopulatedRelationship populateRelationship(String name) {
    return _populatedRelationships.putIfAbsent(name, () {
      var f = raw.fields.firstWhere((f) => f.name == name);
      var relationship = relationships[name];
      DartType refType = f.type;

      if (refType.isAssignableTo(typeProvider.listType) ||
          refType.name == 'List') {
        var iType = refType as InterfaceType;

        if (iType.typeArguments.isEmpty)
          throw 'Relationship "${f.name}" cannot be modeled as a generic List.';

        refType = iType.typeArguments.first;
      }

      var typeName = refType.name.startsWith('_')
          ? refType.name.substring(1)
          : refType.name;
      var rc = new ReCase(typeName);

      if (relationship.type == RelationshipType.HAS_ONE ||
          relationship.type == RelationshipType.HAS_MANY) {
        //print('Has many $tableName');
|
||||||
|
var single = singularize(tableName);
|
||||||
|
var foreignKey = relationship.foreignTable ??
|
||||||
|
(autoSnakeCaseNames != false ? '${single}_id' : '${single}Id');
|
||||||
|
var localKey = relationship.localKey ?? 'id';
|
||||||
|
var foreignTable = relationship.foreignTable ??
|
||||||
|
(autoSnakeCaseNames != false
|
||||||
|
? pluralize(rc.snakeCase)
|
||||||
|
: pluralize(typeName));
|
||||||
|
return new PopulatedRelationship(
|
||||||
|
relationship.type,
|
||||||
|
f.name,
|
||||||
|
f.type,
|
||||||
|
buildStep,
|
||||||
|
resolver,
|
||||||
|
autoSnakeCaseNames,
|
||||||
|
autoIdAndDateFields,
|
||||||
|
relationship.type == RelationshipType.HAS_ONE,
|
||||||
|
typeProvider,
|
||||||
|
localKey: localKey,
|
||||||
|
foreignKey: foreignKey,
|
||||||
|
foreignTable: foreignTable,
|
||||||
|
cascadeOnDelete: relationship.cascadeOnDelete);
|
||||||
|
} else if (relationship.type == RelationshipType.BELONGS_TO ||
|
||||||
|
relationship.type == RelationshipType.BELONGS_TO_MANY) {
|
||||||
|
var localKey = relationship.localKey ??
|
||||||
|
(autoSnakeCaseNames != false
|
||||||
|
? '${rc.snakeCase}_id'
|
||||||
|
: '${typeName}Id');
|
||||||
|
var foreignKey = relationship.foreignKey ?? 'id';
|
||||||
|
var foreignTable = relationship.foreignTable ??
|
||||||
|
(autoSnakeCaseNames != false
|
||||||
|
? pluralize(rc.snakeCase)
|
||||||
|
: pluralize(typeName));
|
||||||
|
return new PopulatedRelationship(
|
||||||
|
relationship.type,
|
||||||
|
f.name,
|
||||||
|
f.type,
|
||||||
|
buildStep,
|
||||||
|
resolver,
|
||||||
|
autoSnakeCaseNames,
|
||||||
|
autoIdAndDateFields,
|
||||||
|
relationship.type == RelationshipType.BELONGS_TO,
|
||||||
|
typeProvider,
|
||||||
|
localKey: localKey,
|
||||||
|
foreignKey: foreignKey,
|
||||||
|
foreignTable: foreignTable,
|
||||||
|
cascadeOnDelete: relationship.cascadeOnDelete);
|
||||||
|
} else
|
||||||
|
throw new UnsupportedError(
|
||||||
|
'Invalid relationship type: ${relationship.type}');
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class PopulatedRelationship extends Relationship {
|
||||||
|
bool _isList;
|
||||||
|
DartType _modelType;
|
||||||
|
PostgresBuildContext _modelTypeContext;
|
||||||
|
DartObject _modelTypeORM;
|
||||||
|
final String originalName;
|
||||||
|
final DartType dartType;
|
||||||
|
final BuildStep buildStep;
|
||||||
|
final Resolver resolver;
|
||||||
|
final bool autoSnakeCaseNames, autoIdAndDateFields;
|
||||||
|
final bool isSingular;
|
||||||
|
final TypeProvider typeProvider;
|
||||||
|
|
||||||
|
PopulatedRelationship(
|
||||||
|
int type,
|
||||||
|
this.originalName,
|
||||||
|
this.dartType,
|
||||||
|
this.buildStep,
|
||||||
|
this.resolver,
|
||||||
|
this.autoSnakeCaseNames,
|
||||||
|
this.autoIdAndDateFields,
|
||||||
|
this.isSingular,
|
||||||
|
this.typeProvider,
|
||||||
|
{String localKey,
|
||||||
|
String foreignKey,
|
||||||
|
String foreignTable,
|
||||||
|
bool cascadeOnDelete})
|
||||||
|
: super(type,
|
||||||
|
localKey: localKey,
|
||||||
|
foreignKey: foreignKey,
|
||||||
|
foreignTable: foreignTable,
|
||||||
|
cascadeOnDelete: cascadeOnDelete);
|
||||||
|
|
||||||
|
bool get isBelongsTo =>
|
||||||
|
type == RelationshipType.BELONGS_TO ||
|
||||||
|
type == RelationshipType.BELONGS_TO_MANY;
|
||||||
|
|
||||||
|
bool get isHas =>
|
||||||
|
type == RelationshipType.HAS_ONE || type == RelationshipType.HAS_MANY;
|
||||||
|
|
||||||
|
bool get isList => _isList ??=
|
||||||
|
dartType.isAssignableTo(typeProvider.listType) || dartType.name == 'List';
|
||||||
|
|
||||||
|
DartType get modelType {
|
||||||
|
if (_modelType != null) return _modelType;
|
||||||
|
DartType searchType = dartType;
|
||||||
|
var ormChecker = new TypeChecker.fromRuntime(ORM);
|
||||||
|
|
||||||
|
// Get inner type from List if any...
|
||||||
|
if (!isSingular) {
|
||||||
|
if (!isList)
|
||||||
|
throw '"$originalName" is a many-to-one relationship, and thus it should be represented as a List within your Dart class. You have it represented as ${dartType.name}.';
|
||||||
|
else {
|
||||||
|
var iType = dartType as InterfaceType;
|
||||||
|
if (iType.typeArguments.isEmpty)
|
||||||
|
throw '"$originalName" is a many-to-one relationship, and should be modeled as a List that references another model type. Example: `List<T>`, where T is a model type.';
|
||||||
|
else
|
||||||
|
searchType = iType.typeArguments.first;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
while (searchType != null) {
|
||||||
|
var classElement = searchType.element as ClassElement;
|
||||||
|
var ormAnnotation = ormChecker.firstAnnotationOf(classElement);
|
||||||
|
|
||||||
|
if (ormAnnotation != null) {
|
||||||
|
_modelTypeORM = ormAnnotation;
|
||||||
|
return _modelType = searchType;
|
||||||
|
} else {
|
||||||
|
// If we didn't find an @ORM(), then refer to the parent type.
|
||||||
|
searchType = classElement.supertype;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new StateError(
|
||||||
|
'Neither ${dartType.name} nor its parent types are annotated with an @ORM() annotation. It is impossible to compute this relationship.');
|
||||||
|
}
|
||||||
|
|
||||||
|
Future<PostgresBuildContext> get modelTypeContext async {
|
||||||
|
if (_modelTypeContext != null) return _modelTypeContext;
|
||||||
|
var reader = new ConstantReader(_modelTypeORM);
|
||||||
|
if (reader.isNull)
|
||||||
|
reader = null;
|
||||||
|
else
|
||||||
|
reader = reader.read('tableName');
|
||||||
|
var orm = reader == null
|
||||||
|
? new ORM()
|
||||||
|
: new ORM(reader.isString ? reader.stringValue : null);
|
||||||
|
return _modelTypeContext = await buildContext(modelType.element, orm,
|
||||||
|
buildStep, resolver, autoSnakeCaseNames, autoIdAndDateFields);
|
||||||
|
}
|
||||||
|
}
|
|
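To make the defaults computed in `populateRelationship()` concrete: for a has-many field like the one below (a sketch that mirrors the `Tree`/`Fruit` test models added later in this commit, assumes the default snake-casing behavior, and assumes the usual `@serializable`/`@orm`/`@hasMany` annotation constants), the relationship resolves to `localKey: 'id'`, `foreignKey: 'tree_id'` (the singularized table name plus `_id`), and `foreignTable: 'fruits'` (the pluralized snake_case of the referenced type).

// Illustrative model only; the annotation constants are assumed.
@serializable
@orm
class _Tree extends Model {
  int rings;

  @hasMany // -> localKey: 'id', foreignKey: 'tree_id', foreignTable: 'fruits'
  List<Fruit> fruits;
}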
@ -1,20 +1,35 @@
 import 'dart:async';
+import 'package:analyzer/dart/ast/ast.dart';
 import 'package:analyzer/dart/element/element.dart';
 import 'package:angel_orm/angel_orm.dart';
 import 'package:build/build.dart';
-import 'package:code_builder/dart/async.dart';
 import 'package:code_builder/dart/core.dart';
 import 'package:code_builder/code_builder.dart';
-import 'package:inflection/inflection.dart';
 import 'package:path/path.dart' as p;
 import 'package:recase/recase.dart';
-import 'package:source_gen/src/annotation.dart';
-import 'package:source_gen/src/utils.dart';
-import 'package:source_gen/source_gen.dart';
+import 'package:source_gen/source_gen.dart' hide LibraryBuilder;
 import 'build_context.dart';
 import 'postgres_build_context.dart';

 class PostgresServiceGenerator extends GeneratorForAnnotation<ORM> {
+  static const List<TypeChecker> primitives = const [
+    const TypeChecker.fromRuntime(String),
+    const TypeChecker.fromRuntime(int),
+    const TypeChecker.fromRuntime(bool),
+    const TypeChecker.fromRuntime(double),
+    const TypeChecker.fromRuntime(num),
+  ];
+
+  static final ExpressionBuilder id = reference('id'),
+      params = reference('params'),
+      connection = reference('connection'),
+      query = reference('query'),
+      buildQuery = reference('buildQuery'),
+      applyData = reference('applyData'),
+      where = reference('query').property('where'),
+      toId = reference('toId'),
+      data = reference('data');
+
   final bool autoSnakeCaseNames;

   final bool autoIdAndDateFields;

@ -24,7 +39,7 @ class PostgresServiceGenerator extends GeneratorForAnnotation<ORM> {
   @override
   Future<String> generateForAnnotatedElement(
-      Element element, ORM annotation, BuildStep buildStep) async {
+      Element element, ConstantReader annotation, BuildStep buildStep) async {
     if (buildStep.inputId.path.contains('.service.g.dart')) {
       return null;
     }

@ -32,14 +47,14 @ class PostgresServiceGenerator extends GeneratorForAnnotation<ORM> {
     if (element is! ClassElement)
       throw 'Only classes can be annotated with @ORM().';
     var resolver = await buildStep.resolver;
-    var lib =
-        generateOrmLibrary(element.library, resolver, buildStep).buildAst();
+    var lib = await generateOrmLibrary(element.library, resolver, buildStep)
+        .then((l) => l.buildAst());
     if (lib == null) return null;
     return prettyToSource(lib);
   }

-  LibraryBuilder generateOrmLibrary(
-      LibraryElement libraryElement, Resolver resolver, BuildStep buildStep) {
+  Future<LibraryBuilder> generateOrmLibrary(LibraryElement libraryElement,
+      Resolver resolver, BuildStep buildStep) async {
     var lib = new LibraryBuilder();
     lib.addDirective(new ImportBuilder('dart:async'));
     lib.addDirective(

@ -51,19 +66,18 @ class PostgresServiceGenerator extends GeneratorForAnnotation<ORM> {
         p.basenameWithoutExtension(buildStep.inputId.path));
     lib.addDirective(new ImportBuilder('$pathName.orm.g.dart'));

-    var elements = getElementsFromLibraryElement(libraryElement)
-        .where((el) => el is ClassElement);
+    var elements = libraryElement.definingCompilationUnit.unit.declarations
+        .where((el) => el is ClassDeclaration);
     Map<ClassElement, PostgresBuildContext> contexts = {};
     List<String> done = [];

-    for (var element in elements) {
+    for (ClassDeclaration element in elements) {
       if (!done.contains(element.name)) {
-        var ann = element.metadata
-            .firstWhere((a) => matchAnnotation(ORM, a), orElse: () => null);
+        var ann = ormTypeChecker.firstAnnotationOf(element.element);
         if (ann != null) {
-          contexts[element] = buildContext(
-              element,
-              instantiateAnnotation(ann),
+          contexts[element.element] = await buildContext(
+              element.element,
+              reviveOrm(new ConstantReader(ann)),
               buildStep,
               resolver,
               autoSnakeCaseNames != false,

@ -106,44 +120,57 @ class PostgresServiceGenerator extends GeneratorForAnnotation<ORM> {
     clazz.addMethod(buildQueryMethod(ctx));
     clazz.addMethod(buildToIdMethod(ctx));
-
-    var params = reference('params'),
-        buildQuery = reference('buildQuery'),
-        connection = reference('connection'),
-        query = reference('query');
-
-    // Future<List<T>> index([p]) => buildQuery(p).get(connection).toList();
-    clazz.addMethod(lambda(
-        'index',
-        buildQuery
-            .call([params]).invoke('get', [connection]).invoke('toList', []),
-        returnType: new TypeBuilder('Future', genericTypes: [
-          new TypeBuilder('List', genericTypes: [ctx.modelClassBuilder])
-        ]))
-      ..addPositional(parameter('params', [lib$core.Map]))
-      ..addAnnotation(lib$core.override));
-
-    var read = new MethodBuilder('read',
-        returnType:
-            new TypeBuilder('Future', genericTypes: [ctx.modelClassBuilder]));
-    parseParams(read, ctx, id: true);
-    read.addStatement(query.invoke('get', [connection]).property('first'));
-    clazz.addMethod(read);
+    clazz.addMethod(buildApplyDataMethod(ctx));
+    clazz.addMethod(buildIndexMethod(ctx));
+    clazz.addMethod(buildCreateMethod(ctx));
+    clazz.addMethod(buildReadOrDeleteMethod('read', 'get', ctx));
+    clazz.addMethod(buildReadOrDeleteMethod('remove', 'delete', ctx));
+    clazz.addMethod(buildUpdateMethod(ctx));
+    clazz.addMethod(buildModifyMethod(ctx));

     return clazz;
   }

   MethodBuilder buildQueryMethod(PostgresBuildContext ctx) {
     var meth =
-        new MethodBuilder('buildQuery', returnType: ctx.queryClassBuilder);
+        new MethodBuilder('buildQuery', returnType: ctx.queryClassBuilder)
+          ..addPositional(parameter('params', [lib$core.Map]));
+    var paramQuery = params[literal('query')];
+    meth.addStatement(
+        varField('query', value: ctx.queryClassBuilder.newInstance([])));
+    var ifStmt = ifThen(paramQuery.isInstanceOf(lib$core.Map));
+
+    ctx.fields.forEach((f) {
+      var alias = ctx.resolveFieldName(f.name);
+      var queryKey = paramQuery[literal(alias)];
+
+      if (f.type.isDynamic ||
+          f.type.isObject ||
+          primitives.any((t) => t.isAssignableFromType(f.type))) {
+        ifStmt
+            .addStatement(where.property(f.name).invoke('equals', [queryKey]));
+      } else if (dateTimeTypeChecker.isAssignableFromType(f.type)) {
+        var dt = queryKey
+            .isInstanceOf(lib$core.String)
+            .ternary(lib$core.DateTime.invoke('parse', [queryKey]), queryKey);
+        ifStmt.addStatement(
+            where.property(f.name).invoke('equals', [updatedAt(dt)]));
+      } else {
+        print(
+            'Cannot compute service query binding for field "${f.name}" in ${ctx.originalClassName}');
+      }
+    });
+
+    meth.addStatement(ifStmt);
+    meth.addStatement(query.asReturn());
     return meth;
   }

   MethodBuilder buildToIdMethod(PostgresBuildContext ctx) {
-    var meth = new MethodBuilder('toId', returnType: lib$core.int);
-    var id = reference('id');
+    var meth = new MethodBuilder('toId', returnType: lib$core.int)
+      ..addPositional(parameter('id'));

     meth.addStatement(ifThen(id.isInstanceOf(lib$core.int), [
       id.asReturn(),

@ -160,9 +187,166 @ class PostgresServiceGenerator extends GeneratorForAnnotation<ORM> {
     return meth;
   }

+  MethodBuilder buildIndexMethod(PostgresBuildContext ctx) {
+    // Future<List<T>> index([p]) => buildQuery(p).get(connection).toList();
+    return method('index', [
+      new TypeBuilder('Future', genericTypes: [
+        new TypeBuilder('List', genericTypes: [ctx.modelClassBuilder])
+      ]),
+      parameter('params', [lib$core.Map]).asOptional(),
+      reference('buildQuery').call([params]).invoke('get', [connection]).invoke(
+        'toList',
+        [],
+      ).asReturn(),
+    ]);
+  }
+
+  MethodBuilder buildReadOrDeleteMethod(
+      String name, String operation, PostgresBuildContext ctx) {
+    var throw404 = new MethodBuilder.closure()..addPositional(parameter('_'));
+    throw404.addStatement(new TypeBuilder('AngelHttpException').newInstance(
+      [],
+      constructor: 'notFound',
+      named: {
+        'message':
+            literal('No record found for ID ') + id.invoke('toString', []),
+      },
+    ));
+
+    return method(name, [
+      new TypeBuilder('Future', genericTypes: [ctx.modelClassBuilder]),
+      parameter('id'),
+      parameter('params', [lib$core.Map]).asOptional(),
+      varField('query', value: buildQuery.call([params])),
+      where.property('id').invoke('equals', [
+        toId.call([id])
+      ]),
+      query
+          .invoke(operation, [connection])
+          .property('first')
+          .invoke('catchError', [
+            throw404,
+          ])
+          .asReturn(),
+    ]);
+  }
+
+  MethodBuilder buildApplyDataMethod(PostgresBuildContext ctx) {
+    var meth =
+        new MethodBuilder('applyData', returnType: ctx.modelClassBuilder);
+    meth.addPositional(parameter('data'));
+
+    meth.addStatement(ifThen(
+      data.isInstanceOf(ctx.modelClassBuilder).or(data.equals(literal(null))),
+      [
+        data.asReturn(),
+      ],
+    ));
+
+    var ifStmt = new IfStatementBuilder(data.isInstanceOf(lib$core.Map));
+    ifStmt.addStatement(
+        varField('query', value: ctx.modelClassBuilder.newInstance([])));
+
+    applyFieldsToInstance(ctx, query, ifStmt.addStatement);
+
+    ifStmt.addStatement(query.asReturn());
+
+    ifStmt.setElse(
+      new TypeBuilder('AngelHttpException')
+          .newInstance([],
+              constructor: 'badRequest',
+              named: {'message': literal('Invalid data.')})
+          .asThrow(),
+    );
+
+    meth.addStatement(ifStmt);
+
+    return meth;
+  }
+
+  MethodBuilder buildCreateMethod(PostgresBuildContext ctx) {
+    var meth = new MethodBuilder('create',
+        returnType:
+            new TypeBuilder('Future', genericTypes: [ctx.modelClassBuilder]));
+    meth
+      ..addPositional(parameter('data'))
+      ..addPositional(parameter('params', [lib$core.Map]).asOptional());
+
+    var rc = new ReCase(ctx.modelClassName);
+    meth.addStatement(
+      ctx.queryClassBuilder.invoke('insert${rc.pascalCase}', [
+        connection,
+        applyData.call([data])
+      ]).asReturn(),
+    );
+
+    return meth;
+  }
+
+  MethodBuilder buildModifyMethod(PostgresBuildContext ctx) {
+    var meth = new MethodBuilder('modify',
+        modifier: MethodModifier.asAsync,
+        returnType:
+            new TypeBuilder('Future', genericTypes: [ctx.modelClassBuilder]));
+    meth
+      ..addPositional(parameter('id'))
+      ..addPositional(parameter('data'))
+      ..addPositional(parameter('params', [lib$core.Map]).asOptional());

+    // read() by id
+    meth.addStatement(varField(
+      'query',
+      value: reference('read').call(
+        [
+          toId.call([id]),
+          params
+        ],
+      ).asAwait(),
+    ));
+
+    var rc = new ReCase(ctx.modelClassName);
+
+    meth.addStatement(ifThen(data.isInstanceOf(ctx.modelClassBuilder), [
+      data.asAssign(query),
+    ]));
+
+    var ifStmt = ifThen(data.isInstanceOf(lib$core.Map));
+
+    applyFieldsToInstance(ctx, query, ifStmt.addStatement);
+    meth.addStatement(ifStmt);
+    meth.addStatement(
+      ctx.queryClassBuilder
+          .invoke('update${rc.pascalCase}', [connection, query])
+          .asAwait()
+          .asReturn(),
+    );
+
+    return meth;
+  }
+
+  MethodBuilder buildUpdateMethod(PostgresBuildContext ctx) {
+    var meth = new MethodBuilder('update',
+        returnType:
+            new TypeBuilder('Future', genericTypes: [ctx.modelClassBuilder]));
+    meth
+      ..addPositional(parameter('id'))
+      ..addPositional(parameter('data'))
+      ..addPositional(parameter('params', [lib$core.Map]).asOptional());
+
+    var rc = new ReCase(ctx.modelClassName);
+    meth.addStatement(
+      ctx.queryClassBuilder.invoke('update${rc.pascalCase}', [
+        connection,
+        applyData.call([data])
+      ]).asReturn(),
+    );
+
+    return meth;
+  }
+
   void parseParams(MethodBuilder meth, PostgresBuildContext ctx, {bool id}) {
     meth.addStatement(varField('query',
-        value: reference('buildQuery').call([
+        value: buildQuery.call([
           reference('params')
               .notEquals(literal(null))
               .ternary(reference('params'), map({}))

@ -175,4 +359,42 @ class PostgresServiceGenerator extends GeneratorForAnnotation<ORM> {
         ]));
   }

+  void applyFieldsToInstance(PostgresBuildContext ctx, ExpressionBuilder query,
+      void addStatement(StatementBuilder statement)) {
+    ctx.fields.forEach((f) {
+      var alias = ctx.resolveFieldName(f.name);
+      var dataKey = data[literal(alias)];
+      ExpressionBuilder target;
+
+      // Skip `id`
+      if (autoIdAndDateFields != false && f.name == 'id') return;
+
+      if (f.type.isDynamic ||
+          f.type.isObject ||
+          primitives.any((t) => t.isAssignableFromType(f.type))) {
+        target = dataKey;
+      } else if (dateTimeTypeChecker.isAssignableFromType(f.type)) {
+        var dt = dataKey
+            .isInstanceOf(lib$core.String)
+            .ternary(lib$core.DateTime.invoke('parse', [dataKey]), dataKey);
+        target = updatedAt(dt);
+      } else {
+        print(
+            'Cannot compute service applyData() binding for field "${f.name}" in ${ctx.originalClassName}');
+      }
+
+      if (target != null) {
+        addStatement(ifThen(data.invoke('containsKey', [literal(alias)]),
+            [target.asAssign(query.property(f.name))]));
+      }
+    });
+  }
+
+  ExpressionBuilder updatedAt(ExpressionBuilder dt) {
+    if (autoIdAndDateFields == false) return dt;
+    return dt
+        .notEquals(literal(null))
+        .ternary(dt, lib$core.DateTime.newInstance([], constructor: 'now'));
+  }
 }
@ -1,5 +1,5 @@
 name: angel_orm_generator
-version: 1.0.0-alpha+3
+version: 1.0.0-alpha+4
 description: Code generators for Angel's ORM.
 author: Tobe O <thosakwe@gmail.com>
 homepage: https://github.com/angel-dart/orm

@ -10,11 +10,11 @ dependencies:
   angel_serialize_generator: ^1.0.0-alpha
   code_builder: ^1.0.0
   inflection: ^0.4.1
+  meta: ^1.0.0
   recase: ^1.0.0
-  source_gen: ^0.6.0
 dev_dependencies:
   angel_diagnostics: ^1.0.0
   angel_framework: ^1.0.0
   angel_test: ^1.0.0
-  build_runner: ^0.3.0
+  build_runner: ^0.5.0
   test: ^0.12.0
@ -1,21 +0,0 @@
-import 'package:postgres/postgres.dart';
-import 'package:test/test.dart';
-import 'models/role.dart';
-import 'models/role.orm.g.dart';
-import 'models/user.dart';
-import 'models/user.orm.g.dart';
-import 'common.dart';
-
-main() {
-  PostgreSQLConnection connection;
-  Role manager, clerk;
-  User john;
-
-  setUp(() async {
-    connection = await connectToPostgres(['user', 'role']);
-
-
-  });
-
-  tearDown(() => connection.close());
-}
@ -111,7 +111,7 @@ main() {
   test('insert sets relationship', () {
     expect(deathlyHallows.author, isNotNull);
-    expect((deathlyHallows.author as Author).name, rowling.name);
+    expect((deathlyHallows.author).name, rowling.name);
   });

   test('delete stream', () async {

@ -123,7 +123,7 @ main() {
     var book = books.first;
     expect(book.id, deathlyHallows.id);
     expect(book.author, isNotNull);
-    expect((book.author as Author).name, rowling.name);
+    expect((book.author).name, rowling.name);
   });

   test('update book', () async {
67
angel_orm_generator/test/has_many_test.dart
Normal file
@ -0,0 +1,67 @@
import 'package:postgres/postgres.dart';
import 'package:test/test.dart';
import 'models/fruit.dart';
import 'models/fruit.orm.g.dart';
import 'models/tree.dart';
import 'models/tree.orm.g.dart';
import 'common.dart';

main() {
  PostgreSQLConnection connection;
  Tree appleTree;
  int treeId;

  setUp(() async {
    connection = await connectToPostgres(['tree', 'fruit']);
    appleTree = await TreeQuery.insert(connection, rings: 10);
    treeId = int.parse(appleTree.id);
  });

  test('list is empty if there is nothing', () {
    expect(appleTree.rings, 10);
    expect(appleTree.fruits, isEmpty);
  });

  group('mutations', () {
    Fruit apple, banana;

    void verify(Tree tree) {
      print(tree.fruits.map((f) => f.toJson()).toList());
      expect(tree.fruits, hasLength(2));
      expect(tree.fruits[0].commonName, apple.commonName);
      expect(tree.fruits[1].commonName, banana.commonName);
    }

    setUp(() async {
      apple = await FruitQuery.insert(
        connection,
        treeId: treeId,
        commonName: 'Apple',
      );

      banana = await FruitQuery.insert(
        connection,
        treeId: treeId,
        commonName: 'Banana',
      );
    });

    test('can fetch any children', () async {
      var tree = await TreeQuery.getOne(treeId, connection);
      verify(tree);
    });

    test('sets on update', () async {
      var tq = new TreeQuery()..where.id.equals(treeId);
      var tree = await tq.update(connection, rings: 24).first;
      verify(tree);
      expect(tree.rings, 24);
    });

    test('sets on delete', () async {
      var tq = new TreeQuery()..where.id.equals(treeId);
      var tree = await tq.delete(connection).first;
      verify(tree);
    });
  });
}
@ -4,7 +4,6 @@ part of angel_orm.generator.models.author;
 // **************************************************************************
 // Generator: JsonModelGenerator
-// Target: class _Author
 // **************************************************************************

 class Author extends _Author {
@ -1,8 +1,7 @@
 // GENERATED CODE - DO NOT MODIFY BY HAND

 // **************************************************************************
-// Generator: PostgresORMGenerator
-// Target: class _Author
+// Generator: PostgresOrmGenerator
 // **************************************************************************

 import 'dart:async';
150
angel_orm_generator/test/models/author.service.g.dart
Normal file
@ -0,0 +1,150 @@
|
||||||
|
// GENERATED CODE - DO NOT MODIFY BY HAND
|
||||||
|
|
||||||
|
// **************************************************************************
|
||||||
|
// Generator: PostgresServiceGenerator
|
||||||
|
// **************************************************************************
|
||||||
|
|
||||||
|
import 'dart:async';
|
||||||
|
import 'package:angel_framework/angel_framework.dart';
|
||||||
|
import 'package:postgres/postgres.dart';
|
||||||
|
import 'author.dart';
|
||||||
|
import 'author.orm.g.dart';
|
||||||
|
|
||||||
|
class AuthorService extends Service {
|
||||||
|
final PostgreSQLConnection connection;
|
||||||
|
|
||||||
|
final bool allowRemoveAll;
|
||||||
|
|
||||||
|
final bool allowQuery;
|
||||||
|
|
||||||
|
AuthorService(this.connection,
|
||||||
|
{this.allowRemoveAll: false, this.allowQuery: false});
|
||||||
|
|
||||||
|
AuthorQuery buildQuery(Map params) {
|
||||||
|
var query = new AuthorQuery();
|
||||||
|
if (params['query'] is Map) {
|
||||||
|
query.where.id.equals(params['query']['id']);
|
||||||
|
query.where.name.equals(params['query']['name']);
|
||||||
|
query.where.createdAt.equals(params['query']['created_at'] is String
|
||||||
|
? DateTime.parse(params['query']['created_at'])
|
||||||
|
: params['query']['created_at'] != null
|
||||||
|
? params['query']['created_at'] is String
|
||||||
|
? DateTime.parse(params['query']['created_at'])
|
||||||
|
: params['query']['created_at']
|
||||||
|
: new DateTime.now());
|
||||||
|
query.where.updatedAt.equals(params['query']['updated_at'] is String
|
||||||
|
? DateTime.parse(params['query']['updated_at'])
|
||||||
|
: params['query']['updated_at'] != null
|
||||||
|
? params['query']['updated_at'] is String
|
||||||
|
? DateTime.parse(params['query']['updated_at'])
|
||||||
|
: params['query']['updated_at']
|
||||||
|
: new DateTime.now());
|
||||||
|
}
|
||||||
|
return query;
|
||||||
|
}
|
||||||
|
|
||||||
|
int toId(id) {
|
||||||
|
if (id is int) {
|
||||||
|
return id;
|
||||||
|
} else {
|
||||||
|
if (id == 'null' || id == null) {
|
||||||
|
return null;
|
||||||
|
} else {
|
||||||
|
return int.parse(id.toString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Author applyData(data) {
|
||||||
|
if (data is Author || data == null) {
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
if (data is Map) {
|
||||||
|
var query = new Author();
|
||||||
|
if (data.containsKey('name')) {
|
||||||
|
query.name = data['name'];
|
||||||
|
}
|
||||||
|
if (data.containsKey('created_at')) {
|
||||||
|
query.createdAt = data['created_at'] is String
|
||||||
|
? DateTime.parse(data['created_at'])
|
||||||
|
: data['created_at'] != null
|
||||||
|
? data['created_at'] is String
|
||||||
|
? DateTime.parse(data['created_at'])
|
||||||
|
: data['created_at']
|
||||||
|
: new DateTime.now();
|
||||||
|
}
|
||||||
|
if (data.containsKey('updated_at')) {
|
||||||
|
query.updatedAt = data['updated_at'] is String
|
||||||
|
? DateTime.parse(data['updated_at'])
|
||||||
|
: data['updated_at'] != null
|
||||||
|
? data['updated_at'] is String
|
||||||
|
? DateTime.parse(data['updated_at'])
|
||||||
|
: data['updated_at']
|
||||||
|
: new DateTime.now();
|
||||||
|
}
|
||||||
|
return query;
|
||||||
|
} else
|
||||||
|
throw new AngelHttpException.badRequest(message: 'Invalid data.');
|
||||||
|
}
|
||||||
|
|
||||||
|
Future<List<Author>> index([Map params]) {
|
||||||
|
return buildQuery(params).get(connection).toList();
|
||||||
|
}
|
||||||
|
|
||||||
|
Future<Author> create(data, [Map params]) {
|
||||||
|
return AuthorQuery.insertAuthor(connection, applyData(data));
|
||||||
|
}
|
||||||
|
|
||||||
|
Future<Author> read(id, [Map params]) {
|
||||||
|
var query = buildQuery(params);
|
||||||
|
query.where.id.equals(toId(id));
|
||||||
|
return query.get(connection).first.catchError((_) {
|
||||||
|
new AngelHttpException.notFound(
|
||||||
|
message: 'No record found for ID ' + id.toString());
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
Future<Author> remove(id, [Map params]) {
|
||||||
|
var query = buildQuery(params);
|
||||||
|
query.where.id.equals(toId(id));
|
||||||
|
return query.delete(connection).first.catchError((_) {
|
||||||
|
new AngelHttpException.notFound(
|
||||||
|
message: 'No record found for ID ' + id.toString());
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
Future<Author> update(id, data, [Map params]) {
|
||||||
|
return AuthorQuery.updateAuthor(connection, applyData(data));
|
||||||
|
}
|
||||||
|
|
||||||
|
Future<Author> modify(id, data, [Map params]) async {
|
||||||
|
var query = await read(toId(id), params);
|
||||||
|
if (data is Author) {
|
||||||
|
query = data;
|
||||||
|
}
|
||||||
|
if (data is Map) {
|
||||||
|
if (data.containsKey('name')) {
|
||||||
|
query.name = data['name'];
|
||||||
|
}
|
||||||
|
if (data.containsKey('created_at')) {
|
||||||
|
query.createdAt = data['created_at'] is String
|
||||||
|
? DateTime.parse(data['created_at'])
|
||||||
|
: data['created_at'] != null
|
||||||
|
? data['created_at'] is String
|
||||||
|
? DateTime.parse(data['created_at'])
|
||||||
|
: data['created_at']
|
||||||
|
: new DateTime.now();
|
||||||
|
}
|
||||||
|
if (data.containsKey('updated_at')) {
|
||||||
|
query.updatedAt = data['updated_at'] is String
|
||||||
|
? DateTime.parse(data['updated_at'])
|
||||||
|
: data['updated_at'] != null
|
||||||
|
? data['updated_at'] is String
|
||||||
|
? DateTime.parse(data['updated_at'])
|
||||||
|
: data['updated_at']
|
||||||
|
: new DateTime.now();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return await AuthorQuery.updateAuthor(connection, query);
|
||||||
|
}
|
||||||
|
}
|
|
@ -11,5 +11,6 @@ part 'book.g.dart';
 class _Book extends Model {
   @belongsTo
   Author author;
+  int authorId;
   String name;
 }
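The explicit `authorId` field gives the generator a concrete column to back the `@belongsTo` relationship. A rough sketch of how the two fields end up being used together; this is illustrative only and assumes the generated `BookQuery`/`Book` API shown in the generated sources below:

// Assumed usage of the generated query class -- not part of this commit's diff.
Future findRowlingBook(PostgreSQLConnection connection) async {
  var query = new BookQuery()..where.authorId.equals(1); // filter on the author_id column
  var book = await query.get(connection).first;
  print(book.author.name); // the joined Author row is populated onto `author`
}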
@ -4,7 +4,6 @@ part of angel_orm.generator.models.book;
 // **************************************************************************
 // Generator: JsonModelGenerator
-// Target: class _Book
 // **************************************************************************

 class Book extends _Book {

@ -12,7 +11,10 @ class Book extends _Book {
   String id;

   @override
-  dynamic author;
+  Author author;
+
+  @override
+  int authorId;

   @override
   String name;

@ -23,12 +25,23 @ class Book extends _Book {
   @override
   DateTime updatedAt;

-  Book({this.id, this.author, this.name, this.createdAt, this.updatedAt});
+  Book(
+      {this.id,
+      this.author,
+      this.authorId,
+      this.name,
+      this.createdAt,
+      this.updatedAt});

   factory Book.fromJson(Map data) {
     return new Book(
         id: data['id'],
-        author: data['author'],
+        author: data['author'] == null
+            ? null
+            : (data['author'] is Author
+                ? data['author']
+                : new Author.fromJson(data['author'])),
+        authorId: data['author_id'],
         name: data['name'],
         createdAt: data['created_at'] is DateTime
             ? data['created_at']

@ -45,6 +58,7 @ class Book extends _Book {
   Map<String, dynamic> toJson() => {
         'id': id,
         'author': author,
+        'author_id': authorId,
         'name': name,
         'created_at': createdAt == null ? null : createdAt.toIso8601String(),
         'updated_at': updatedAt == null ? null : updatedAt.toIso8601String()
@ -1,8 +1,7 @@
 // GENERATED CODE - DO NOT MODIFY BY HAND

 // **************************************************************************
-// Generator: PostgresORMGenerator
-// Target: class _Book
+// Generator: PostgresOrmGenerator
 // **************************************************************************

 import 'dart:async';
157
angel_orm_generator/test/models/book.service.g.dart
Normal file
@ -0,0 +1,157 @@
|
||||||
|
// GENERATED CODE - DO NOT MODIFY BY HAND
|
||||||
|
|
||||||
|
// **************************************************************************
|
||||||
|
// Generator: PostgresServiceGenerator
|
||||||
|
// **************************************************************************
|
||||||
|
|
||||||
|
import 'dart:async';
|
||||||
|
import 'package:angel_framework/angel_framework.dart';
|
||||||
|
import 'package:postgres/postgres.dart';
|
||||||
|
import 'book.dart';
|
||||||
|
import 'book.orm.g.dart';
|
||||||
|
|
||||||
|
class BookService extends Service {
|
||||||
|
final PostgreSQLConnection connection;
|
||||||
|
|
||||||
|
final bool allowRemoveAll;
|
||||||
|
|
||||||
|
final bool allowQuery;
|
||||||
|
|
||||||
|
BookService(this.connection,
|
||||||
|
{this.allowRemoveAll: false, this.allowQuery: false});
|
||||||
|
|
||||||
|
BookQuery buildQuery(Map params) {
|
||||||
|
var query = new BookQuery();
|
||||||
|
if (params['query'] is Map) {
|
||||||
|
query.where.id.equals(params['query']['id']);
|
||||||
|
query.where.name.equals(params['query']['name']);
|
||||||
|
query.where.createdAt.equals(params['query']['created_at'] is String
|
||||||
|
? DateTime.parse(params['query']['created_at'])
|
||||||
|
: params['query']['created_at'] != null
|
||||||
|
? params['query']['created_at'] is String
|
||||||
|
? DateTime.parse(params['query']['created_at'])
|
||||||
|
: params['query']['created_at']
|
||||||
|
: new DateTime.now());
|
||||||
|
query.where.updatedAt.equals(params['query']['updated_at'] is String
|
||||||
|
? DateTime.parse(params['query']['updated_at'])
|
||||||
|
: params['query']['updated_at'] != null
|
||||||
|
? params['query']['updated_at'] is String
|
||||||
|
? DateTime.parse(params['query']['updated_at'])
|
||||||
|
: params['query']['updated_at']
|
||||||
|
: new DateTime.now());
|
||||||
|
query.where.authorId.equals(params['query']['author_id']);
|
||||||
|
}
|
||||||
|
return query;
|
||||||
|
}
|
||||||
|
|
||||||
|
int toId(id) {
|
||||||
|
if (id is int) {
|
||||||
|
return id;
|
||||||
|
} else {
|
||||||
|
if (id == 'null' || id == null) {
|
||||||
|
return null;
|
||||||
|
} else {
|
||||||
|
return int.parse(id.toString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Book applyData(data) {
|
||||||
|
if (data is Book || data == null) {
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
if (data is Map) {
|
||||||
|
var query = new Book();
|
||||||
|
if (data.containsKey('name')) {
|
||||||
|
query.name = data['name'];
|
||||||
|
}
|
||||||
|
if (data.containsKey('created_at')) {
|
||||||
|
query.createdAt = data['created_at'] is String
|
||||||
|
? DateTime.parse(data['created_at'])
|
||||||
|
: data['created_at'] != null
|
||||||
|
? data['created_at'] is String
|
||||||
|
? DateTime.parse(data['created_at'])
|
||||||
|
: data['created_at']
|
||||||
|
: new DateTime.now();
|
||||||
|
}
|
||||||
|
if (data.containsKey('updated_at')) {
|
||||||
|
query.updatedAt = data['updated_at'] is String
|
||||||
|
? DateTime.parse(data['updated_at'])
|
||||||
|
: data['updated_at'] != null
|
||||||
|
? data['updated_at'] is String
|
||||||
|
? DateTime.parse(data['updated_at'])
|
||||||
|
: data['updated_at']
|
||||||
|
: new DateTime.now();
|
||||||
|
}
|
||||||
|
if (data.containsKey('author_id')) {
|
||||||
|
query.authorId = data['author_id'];
|
||||||
|
}
|
||||||
|
return query;
|
||||||
|
} else
|
||||||
|
throw new AngelHttpException.badRequest(message: 'Invalid data.');
|
||||||
|
}
|
||||||
|
|
||||||
|
Future<List<Book>> index([Map params]) {
|
||||||
|
return buildQuery(params).get(connection).toList();
|
||||||
|
}
|
||||||
|
|
||||||
|
Future<Book> create(data, [Map params]) {
|
||||||
|
return BookQuery.insertBook(connection, applyData(data));
|
||||||
|
}
|
||||||
|
|
||||||
|
Future<Book> read(id, [Map params]) {
|
||||||
|
var query = buildQuery(params);
|
||||||
|
query.where.id.equals(toId(id));
|
||||||
|
return query.get(connection).first.catchError((_) {
|
||||||
|
new AngelHttpException.notFound(
|
||||||
|
message: 'No record found for ID ' + id.toString());
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
Future<Book> remove(id, [Map params]) {
|
||||||
|
var query = buildQuery(params);
|
||||||
|
query.where.id.equals(toId(id));
|
||||||
|
return query.delete(connection).first.catchError((_) {
|
||||||
|
new AngelHttpException.notFound(
|
||||||
|
message: 'No record found for ID ' + id.toString());
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
Future<Book> update(id, data, [Map params]) {
|
||||||
|
return BookQuery.updateBook(connection, applyData(data));
|
||||||
|
}
|
||||||
|
|
||||||
|
Future<Book> modify(id, data, [Map params]) async {
|
||||||
|
var query = await read(toId(id), params);
|
||||||
|
if (data is Book) {
|
||||||
|
query = data;
|
||||||
|
}
|
||||||
|
if (data is Map) {
|
||||||
|
if (data.containsKey('name')) {
|
||||||
|
query.name = data['name'];
|
||||||
|
}
|
||||||
|
if (data.containsKey('created_at')) {
|
||||||
|
query.createdAt = data['created_at'] is String
|
||||||
|
? DateTime.parse(data['created_at'])
|
||||||
|
: data['created_at'] != null
|
||||||
|
? data['created_at'] is String
|
||||||
|
? DateTime.parse(data['created_at'])
|
||||||
|
: data['created_at']
|
||||||
|
: new DateTime.now();
|
||||||
|
}
|
||||||
|
if (data.containsKey('updated_at')) {
|
||||||
|
query.updatedAt = data['updated_at'] is String
|
||||||
|
? DateTime.parse(data['updated_at'])
|
||||||
|
: data['updated_at'] != null
|
||||||
|
? data['updated_at'] is String
|
||||||
|
? DateTime.parse(data['updated_at'])
|
||||||
|
: data['updated_at']
|
||||||
|
: new DateTime.now();
|
||||||
|
}
|
||||||
|
if (data.containsKey('author_id')) {
|
||||||
|
query.authorId = data['author_id'];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return await BookQuery.updateBook(connection, query);
|
||||||
|
}
|
||||||
|
}
|
|
@ -4,7 +4,6 @@ part of angel_orm.generator.models.car;
 // **************************************************************************
 // Generator: JsonModelGenerator
-// Target: class _Car
 // **************************************************************************

 class Car extends _Car {
@ -1,8 +1,7 @@
 // GENERATED CODE - DO NOT MODIFY BY HAND

 // **************************************************************************
-// Generator: PostgresORMGenerator
-// Target: class _Car
+// Generator: PostgresOrmGenerator
 // **************************************************************************

 import 'dart:async';
189
angel_orm_generator/test/models/car.service.g.dart
Normal file
@ -0,0 +1,189 @@
|
||||||
|
// GENERATED CODE - DO NOT MODIFY BY HAND
|
||||||
|
|
||||||
|
// **************************************************************************
|
||||||
|
// Generator: PostgresServiceGenerator
|
||||||
|
// **************************************************************************
|
||||||
|
|
||||||
|
import 'dart:async';
|
||||||
|
import 'package:angel_framework/angel_framework.dart';
|
||||||
|
import 'package:postgres/postgres.dart';
|
||||||
|
import 'car.dart';
|
||||||
|
import 'car.orm.g.dart';
|
||||||
|
|
||||||
|
class CarService extends Service {
|
||||||
|
final PostgreSQLConnection connection;
|
||||||
|
|
||||||
|
final bool allowRemoveAll;
|
||||||
|
|
||||||
|
final bool allowQuery;
|
||||||
|
|
||||||
|
CarService(this.connection,
|
||||||
|
{this.allowRemoveAll: false, this.allowQuery: false});
|
||||||
|
|
||||||
|
CarQuery buildQuery(Map params) {
|
||||||
|
var query = new CarQuery();
|
||||||
|
if (params['query'] is Map) {
|
||||||
|
query.where.id.equals(params['query']['id']);
|
||||||
|
query.where.make.equals(params['query']['make']);
|
||||||
|
query.where.description.equals(params['query']['description']);
|
||||||
|
query.where.familyFriendly.equals(params['query']['family_friendly']);
|
||||||
|
query.where.recalledAt.equals(params['query']['recalled_at'] is String
|
||||||
|
? DateTime.parse(params['query']['recalled_at'])
|
||||||
|
: params['query']['recalled_at'] != null
|
||||||
|
? params['query']['recalled_at'] is String
|
||||||
|
? DateTime.parse(params['query']['recalled_at'])
|
||||||
|
: params['query']['recalled_at']
|
||||||
|
: new DateTime.now());
|
||||||
|
query.where.createdAt.equals(params['query']['created_at'] is String
|
||||||
|
? DateTime.parse(params['query']['created_at'])
|
||||||
|
: params['query']['created_at'] != null
|
||||||
|
? params['query']['created_at'] is String
|
||||||
|
? DateTime.parse(params['query']['created_at'])
|
||||||
|
: params['query']['created_at']
|
||||||
|
: new DateTime.now());
|
||||||
|
query.where.updatedAt.equals(params['query']['updated_at'] is String
|
||||||
|
? DateTime.parse(params['query']['updated_at'])
|
||||||
|
: params['query']['updated_at'] != null
|
||||||
|
? params['query']['updated_at'] is String
|
||||||
|
? DateTime.parse(params['query']['updated_at'])
|
||||||
|
: params['query']['updated_at']
|
||||||
|
: new DateTime.now());
|
||||||
|
}
|
||||||
|
return query;
|
||||||
|
}
|
||||||
|
|
||||||
|
int toId(id) {
|
||||||
|
if (id is int) {
|
||||||
|
return id;
|
||||||
|
} else {
|
||||||
|
if (id == 'null' || id == null) {
|
||||||
|
return null;
|
||||||
|
} else {
|
||||||
|
return int.parse(id.toString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Car applyData(data) {
|
||||||
|
if (data is Car || data == null) {
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
if (data is Map) {
|
||||||
|
var query = new Car();
|
||||||
|
if (data.containsKey('make')) {
|
||||||
|
query.make = data['make'];
|
||||||
|
}
|
||||||
|
if (data.containsKey('description')) {
|
||||||
|
query.description = data['description'];
|
||||||
|
}
|
||||||
|
if (data.containsKey('family_friendly')) {
|
||||||
|
query.familyFriendly = data['family_friendly'];
|
||||||
|
}
|
||||||
|
if (data.containsKey('recalled_at')) {
|
||||||
|
query.recalledAt = data['recalled_at'] is String
|
||||||
|
? DateTime.parse(data['recalled_at'])
|
||||||
|
            : data['recalled_at'] != null
                ? data['recalled_at'] is String
                    ? DateTime.parse(data['recalled_at'])
                    : data['recalled_at']
                : new DateTime.now();
      }
      if (data.containsKey('created_at')) {
        query.createdAt = data['created_at'] is String
            ? DateTime.parse(data['created_at'])
            : data['created_at'] != null
                ? data['created_at'] is String
                    ? DateTime.parse(data['created_at'])
                    : data['created_at']
                : new DateTime.now();
      }
      if (data.containsKey('updated_at')) {
        query.updatedAt = data['updated_at'] is String
            ? DateTime.parse(data['updated_at'])
            : data['updated_at'] != null
                ? data['updated_at'] is String
                    ? DateTime.parse(data['updated_at'])
                    : data['updated_at']
                : new DateTime.now();
      }
      return query;
    } else
      throw new AngelHttpException.badRequest(message: 'Invalid data.');
  }

  Future<List<Car>> index([Map params]) {
    return buildQuery(params).get(connection).toList();
  }

  Future<Car> create(data, [Map params]) {
    return CarQuery.insertCar(connection, applyData(data));
  }

  Future<Car> read(id, [Map params]) {
    var query = buildQuery(params);
    query.where.id.equals(toId(id));
    return query.get(connection).first.catchError((_) {
      new AngelHttpException.notFound(
          message: 'No record found for ID ' + id.toString());
    });
  }

  Future<Car> remove(id, [Map params]) {
    var query = buildQuery(params);
    query.where.id.equals(toId(id));
    return query.delete(connection).first.catchError((_) {
      new AngelHttpException.notFound(
          message: 'No record found for ID ' + id.toString());
    });
  }

  Future<Car> update(id, data, [Map params]) {
    return CarQuery.updateCar(connection, applyData(data));
  }

  Future<Car> modify(id, data, [Map params]) async {
    var query = await read(toId(id), params);
    if (data is Car) {
      query = data;
    }
    if (data is Map) {
      if (data.containsKey('make')) {
        query.make = data['make'];
      }
      if (data.containsKey('description')) {
        query.description = data['description'];
      }
      if (data.containsKey('family_friendly')) {
        query.familyFriendly = data['family_friendly'];
      }
      if (data.containsKey('recalled_at')) {
        query.recalledAt = data['recalled_at'] is String
            ? DateTime.parse(data['recalled_at'])
            : data['recalled_at'] != null
                ? data['recalled_at'] is String
                    ? DateTime.parse(data['recalled_at'])
                    : data['recalled_at']
                : new DateTime.now();
      }
      if (data.containsKey('created_at')) {
        query.createdAt = data['created_at'] is String
            ? DateTime.parse(data['created_at'])
            : data['created_at'] != null
                ? data['created_at'] is String
                    ? DateTime.parse(data['created_at'])
                    : data['created_at']
                : new DateTime.now();
      }
      if (data.containsKey('updated_at')) {
        query.updatedAt = data['updated_at'] is String
            ? DateTime.parse(data['updated_at'])
            : data['updated_at'] != null
                ? data['updated_at'] is String
                    ? DateTime.parse(data['updated_at'])
                    : data['updated_at']
                : new DateTime.now();
      }
    }
    return await CarQuery.updateCar(connection, query);
  }
}
@@ -4,7 +4,6 @@ part of angel_orm_generator.test.models.foot;
 
 // **************************************************************************
 // Generator: JsonModelGenerator
-// Target: class _Foot
 // **************************************************************************
 
 class Foot extends _Foot {
@@ -1,8 +1,7 @@
 // GENERATED CODE - DO NOT MODIFY BY HAND
 
 // **************************************************************************
-// Generator: PostgresORMGenerator
-// Target: class _Foot
+// Generator: PostgresOrmGenerator
 // **************************************************************************
 
 import 'dart:async';
157  angel_orm_generator/test/models/foot.service.g.dart  Normal file
@@ -0,0 +1,157 @@
// GENERATED CODE - DO NOT MODIFY BY HAND

// **************************************************************************
// Generator: PostgresServiceGenerator
// **************************************************************************

import 'dart:async';
import 'package:angel_framework/angel_framework.dart';
import 'package:postgres/postgres.dart';
import 'foot.dart';
import 'foot.orm.g.dart';

class FootService extends Service {
  final PostgreSQLConnection connection;

  final bool allowRemoveAll;

  final bool allowQuery;

  FootService(this.connection,
      {this.allowRemoveAll: false, this.allowQuery: false});

  FootQuery buildQuery(Map params) {
    var query = new FootQuery();
    if (params['query'] is Map) {
      query.where.id.equals(params['query']['id']);
      query.where.legId.equals(params['query']['leg_id']);
      query.where.nToes.equals(params['query']['n_toes']);
      query.where.createdAt.equals(params['query']['created_at'] is String
          ? DateTime.parse(params['query']['created_at'])
          : params['query']['created_at'] != null
              ? params['query']['created_at'] is String
                  ? DateTime.parse(params['query']['created_at'])
                  : params['query']['created_at']
              : new DateTime.now());
      query.where.updatedAt.equals(params['query']['updated_at'] is String
          ? DateTime.parse(params['query']['updated_at'])
          : params['query']['updated_at'] != null
              ? params['query']['updated_at'] is String
                  ? DateTime.parse(params['query']['updated_at'])
                  : params['query']['updated_at']
              : new DateTime.now());
    }
    return query;
  }

  int toId(id) {
    if (id is int) {
      return id;
    } else {
      if (id == 'null' || id == null) {
        return null;
      } else {
        return int.parse(id.toString());
      }
    }
  }

  Foot applyData(data) {
    if (data is Foot || data == null) {
      return data;
    }
    if (data is Map) {
      var query = new Foot();
      if (data.containsKey('leg_id')) {
        query.legId = data['leg_id'];
      }
      if (data.containsKey('n_toes')) {
        query.nToes = data['n_toes'];
      }
      if (data.containsKey('created_at')) {
        query.createdAt = data['created_at'] is String
            ? DateTime.parse(data['created_at'])
            : data['created_at'] != null
                ? data['created_at'] is String
                    ? DateTime.parse(data['created_at'])
                    : data['created_at']
                : new DateTime.now();
      }
      if (data.containsKey('updated_at')) {
        query.updatedAt = data['updated_at'] is String
            ? DateTime.parse(data['updated_at'])
            : data['updated_at'] != null
                ? data['updated_at'] is String
                    ? DateTime.parse(data['updated_at'])
                    : data['updated_at']
                : new DateTime.now();
      }
      return query;
    } else
      throw new AngelHttpException.badRequest(message: 'Invalid data.');
  }

  Future<List<Foot>> index([Map params]) {
    return buildQuery(params).get(connection).toList();
  }

  Future<Foot> create(data, [Map params]) {
    return FootQuery.insertFoot(connection, applyData(data));
  }

  Future<Foot> read(id, [Map params]) {
    var query = buildQuery(params);
    query.where.id.equals(toId(id));
    return query.get(connection).first.catchError((_) {
      new AngelHttpException.notFound(
          message: 'No record found for ID ' + id.toString());
    });
  }

  Future<Foot> remove(id, [Map params]) {
    var query = buildQuery(params);
    query.where.id.equals(toId(id));
    return query.delete(connection).first.catchError((_) {
      new AngelHttpException.notFound(
          message: 'No record found for ID ' + id.toString());
    });
  }

  Future<Foot> update(id, data, [Map params]) {
    return FootQuery.updateFoot(connection, applyData(data));
  }

  Future<Foot> modify(id, data, [Map params]) async {
    var query = await read(toId(id), params);
    if (data is Foot) {
      query = data;
    }
    if (data is Map) {
      if (data.containsKey('leg_id')) {
        query.legId = data['leg_id'];
      }
      if (data.containsKey('n_toes')) {
        query.nToes = data['n_toes'];
      }
      if (data.containsKey('created_at')) {
        query.createdAt = data['created_at'] is String
            ? DateTime.parse(data['created_at'])
            : data['created_at'] != null
                ? data['created_at'] is String
                    ? DateTime.parse(data['created_at'])
                    : data['created_at']
                : new DateTime.now();
      }
      if (data.containsKey('updated_at')) {
        query.updatedAt = data['updated_at'] is String
            ? DateTime.parse(data['updated_at'])
            : data['updated_at'] != null
                ? data['updated_at'] is String
                    ? DateTime.parse(data['updated_at'])
                    : data['updated_at']
                : new DateTime.now();
      }
    }
    return await FootQuery.updateFoot(connection, query);
  }
}
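A minimal wiring sketch for a generated service (not part of the commit; the mount path, database credentials, and app setup are assumptions for illustration):

import 'package:angel_framework/angel_framework.dart';
import 'package:postgres/postgres.dart';
import 'models/foot.service.g.dart';

main() async {
  var app = new Angel();
  // Hypothetical connection details; substitute your own.
  var connection = new PostgreSQLConnection('localhost', 5432, 'test');
  await connection.open();

  // Mounting the generated service exposes index/create/read/update/remove over REST.
  app.use('/api/feet', new FootService(connection));
}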
13  angel_orm_generator/test/models/fruit.dart  Normal file
@@ -0,0 +1,13 @@
library angel_orm_generator.test.models.fruit;

import 'package:angel_model/angel_model.dart';
import 'package:angel_orm/angel_orm.dart';
import 'package:angel_serialize/angel_serialize.dart';
part 'fruit.g.dart';

@serializable
@orm
class _Fruit extends Model {
  int treeId;
  String commonName;
}
1  angel_orm_generator/test/models/fruit.down.g.sql  Normal file
@@ -0,0 +1 @@
DROP TABLE "fruits";
58  angel_orm_generator/test/models/fruit.g.dart  Normal file
@@ -0,0 +1,58 @@
// GENERATED CODE - DO NOT MODIFY BY HAND

part of angel_orm_generator.test.models.fruit;

// **************************************************************************
// Generator: JsonModelGenerator
// **************************************************************************

class Fruit extends _Fruit {
  @override
  String id;

  @override
  int treeId;

  @override
  String commonName;

  @override
  DateTime createdAt;

  @override
  DateTime updatedAt;

  Fruit(
      {this.id, this.treeId, this.commonName, this.createdAt, this.updatedAt});

  factory Fruit.fromJson(Map data) {
    return new Fruit(
        id: data['id'],
        treeId: data['tree_id'],
        commonName: data['common_name'],
        createdAt: data['created_at'] is DateTime
            ? data['created_at']
            : (data['created_at'] is String
                ? DateTime.parse(data['created_at'])
                : null),
        updatedAt: data['updated_at'] is DateTime
            ? data['updated_at']
            : (data['updated_at'] is String
                ? DateTime.parse(data['updated_at'])
                : null));
  }

  Map<String, dynamic> toJson() => {
        'id': id,
        'tree_id': treeId,
        'common_name': commonName,
        'created_at': createdAt == null ? null : createdAt.toIso8601String(),
        'updated_at': updatedAt == null ? null : updatedAt.toIso8601String()
      };

  static Fruit parse(Map map) => new Fruit.fromJson(map);

  Fruit clone() {
    return new Fruit.fromJson(toJson());
  }
}
257  angel_orm_generator/test/models/fruit.orm.g.dart  Normal file
@@ -0,0 +1,257 @@
// GENERATED CODE - DO NOT MODIFY BY HAND

// **************************************************************************
// Generator: PostgresOrmGenerator
// **************************************************************************

import 'dart:async';
import 'package:angel_orm/angel_orm.dart';
import 'package:postgres/postgres.dart';
import 'fruit.dart';

class FruitQuery {
  final Map<FruitQuery, bool> _unions = {};

  String _sortKey;

  String _sortMode;

  int limit;

  int offset;

  final List<FruitQueryWhere> _or = [];

  final FruitQueryWhere where = new FruitQueryWhere();

  void union(FruitQuery query) {
    _unions[query] = false;
  }

  void unionAll(FruitQuery query) {
    _unions[query] = true;
  }

  void sortDescending(String key) {
    _sortMode = 'Descending';
    _sortKey = ('' + key);
  }

  void sortAscending(String key) {
    _sortMode = 'Ascending';
    _sortKey = ('' + key);
  }

  void or(FruitQueryWhere selector) {
    _or.add(selector);
  }

  String toSql([String prefix]) {
    var buf = new StringBuffer();
    buf.write(prefix != null
        ? prefix
        : 'SELECT id, tree_id, common_name, created_at, updated_at FROM "fruits"');
    if (prefix == null) {}
    var whereClause = where.toWhereClause();
    if (whereClause != null) {
      buf.write(' ' + whereClause);
    }
    _or.forEach((x) {
      var whereClause = x.toWhereClause(keyword: false);
      if (whereClause != null) {
        buf.write(' OR (' + whereClause + ')');
      }
    });
    if (prefix == null) {
      if (limit != null) {
        buf.write(' LIMIT ' + limit.toString());
      }
      if (offset != null) {
        buf.write(' OFFSET ' + offset.toString());
      }
      if (_sortMode == 'Descending') {
        buf.write(' ORDER BY "' + _sortKey + '" DESC');
      }
      if (_sortMode == 'Ascending') {
        buf.write(' ORDER BY "' + _sortKey + '" ASC');
      }
      _unions.forEach((query, all) {
        buf.write(' UNION');
        if (all) {
          buf.write(' ALL');
        }
        buf.write(' (');
        var sql = query.toSql().replaceAll(';', '');
        buf.write(sql + ')');
      });
      buf.write(';');
    }
    return buf.toString();
  }

  static Fruit parseRow(List row) {
    var result = new Fruit.fromJson({
      'id': row[0].toString(),
      'tree_id': row[1],
      'common_name': row[2],
      'created_at': row[3],
      'updated_at': row[4]
    });
    return result;
  }

  Stream<Fruit> get(PostgreSQLConnection connection) {
    StreamController<Fruit> ctrl = new StreamController<Fruit>();
    connection.query(toSql()).then((rows) async {
      var futures = rows.map((row) async {
        var parsed = parseRow(row);
        return parsed;
      });
      var output = await Future.wait(futures);
      output.forEach(ctrl.add);
      ctrl.close();
    }).catchError(ctrl.addError);
    return ctrl.stream;
  }

  static Future<Fruit> getOne(int id, PostgreSQLConnection connection) {
    var query = new FruitQuery();
    query.where.id.equals(id);
    return query.get(connection).first.catchError((_) => null);
  }

  Stream<Fruit> update(PostgreSQLConnection connection,
      {int treeId, String commonName, DateTime createdAt, DateTime updatedAt}) {
    var buf = new StringBuffer(
        'UPDATE "fruits" SET ("tree_id", "common_name", "created_at", "updated_at") = (@treeId, @commonName, @createdAt, @updatedAt) ');
    var whereClause = where.toWhereClause();
    if (whereClause != null) {
      buf.write(whereClause);
    }
    var __ormNow__ = new DateTime.now();
    var ctrl = new StreamController<Fruit>();
    connection.query(
        buf.toString() +
            ' RETURNING "id", "tree_id", "common_name", "created_at", "updated_at";',
        substitutionValues: {
          'treeId': treeId,
          'commonName': commonName,
          'createdAt': createdAt != null ? createdAt : __ormNow__,
          'updatedAt': updatedAt != null ? updatedAt : __ormNow__
        }).then((rows) async {
      var futures = rows.map((row) async {
        var parsed = parseRow(row);
        return parsed;
      });
      var output = await Future.wait(futures);
      output.forEach(ctrl.add);
      ctrl.close();
    }).catchError(ctrl.addError);
    return ctrl.stream;
  }

  Stream<Fruit> delete(PostgreSQLConnection connection) {
    StreamController<Fruit> ctrl = new StreamController<Fruit>();
    connection
        .query(toSql('DELETE FROM "fruits"') +
            ' RETURNING "id", "tree_id", "common_name", "created_at", "updated_at";')
        .then((rows) async {
      var futures = rows.map((row) async {
        var parsed = parseRow(row);
        return parsed;
      });
      var output = await Future.wait(futures);
      output.forEach(ctrl.add);
      ctrl.close();
    }).catchError(ctrl.addError);
    return ctrl.stream;
  }

  static Future<Fruit> deleteOne(int id, PostgreSQLConnection connection) {
    var query = new FruitQuery();
    query.where.id.equals(id);
    return query.delete(connection).first;
  }

  static Future<Fruit> insert(PostgreSQLConnection connection,
      {int treeId,
      String commonName,
      DateTime createdAt,
      DateTime updatedAt}) async {
    var __ormNow__ = new DateTime.now();
    var result = await connection.query(
        'INSERT INTO "fruits" ("tree_id", "common_name", "created_at", "updated_at") VALUES (@treeId, @commonName, @createdAt, @updatedAt) RETURNING "id", "tree_id", "common_name", "created_at", "updated_at";',
        substitutionValues: {
          'treeId': treeId,
          'commonName': commonName,
          'createdAt': createdAt != null ? createdAt : __ormNow__,
          'updatedAt': updatedAt != null ? updatedAt : __ormNow__
        });
    var output = parseRow(result[0]);
    return output;
  }

  static Future<Fruit> insertFruit(
      PostgreSQLConnection connection, Fruit fruit) {
    return FruitQuery.insert(connection,
        treeId: fruit.treeId,
        commonName: fruit.commonName,
        createdAt: fruit.createdAt,
        updatedAt: fruit.updatedAt);
  }

  static Future<Fruit> updateFruit(
      PostgreSQLConnection connection, Fruit fruit) {
    var query = new FruitQuery();
    query.where.id.equals(int.parse(fruit.id));
    return query
        .update(connection,
            treeId: fruit.treeId,
            commonName: fruit.commonName,
            createdAt: fruit.createdAt,
            updatedAt: fruit.updatedAt)
        .first;
  }

  static Stream<Fruit> getAll(PostgreSQLConnection connection) =>
      new FruitQuery().get(connection);
}

class FruitQueryWhere {
  final NumericSqlExpressionBuilder<int> id =
      new NumericSqlExpressionBuilder<int>();

  final NumericSqlExpressionBuilder<int> treeId =
      new NumericSqlExpressionBuilder<int>();

  final StringSqlExpressionBuilder commonName =
      new StringSqlExpressionBuilder();

  final DateTimeSqlExpressionBuilder createdAt =
      new DateTimeSqlExpressionBuilder('fruits.created_at');

  final DateTimeSqlExpressionBuilder updatedAt =
      new DateTimeSqlExpressionBuilder('fruits.updated_at');

  String toWhereClause({bool keyword}) {
    final List<String> expressions = [];
    if (id.hasValue) {
      expressions.add('fruits.id ' + id.compile());
    }
    if (treeId.hasValue) {
      expressions.add('fruits.tree_id ' + treeId.compile());
    }
    if (commonName.hasValue) {
      expressions.add('fruits.common_name ' + commonName.compile());
    }
    if (createdAt.hasValue) {
      expressions.add(createdAt.compile());
    }
    if (updatedAt.hasValue) {
      expressions.add(updatedAt.compile());
    }
    return expressions.isEmpty
        ? null
        : ((keyword != false ? 'WHERE ' : '') + expressions.join(' AND '));
  }
}
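A minimal caller-side sketch for the generated query class (not part of the commit; the database credentials and the 'Banana' value are assumptions for illustration):

import 'package:postgres/postgres.dart';
import 'fruit.orm.g.dart';

main() async {
  var connection = new PostgreSQLConnection('localhost', 5432, 'test');
  await connection.open();

  // Build a filter with the generated where-builders, then stream the rows.
  var query = new FruitQuery()..where.commonName.equals('Banana');
  var bananas = await query.get(connection).toList();
  print(bananas.map((f) => f.toJson()).toList());
}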
157  angel_orm_generator/test/models/fruit.service.g.dart  Normal file
@@ -0,0 +1,157 @@
// GENERATED CODE - DO NOT MODIFY BY HAND

// **************************************************************************
// Generator: PostgresServiceGenerator
// **************************************************************************

import 'dart:async';
import 'package:angel_framework/angel_framework.dart';
import 'package:postgres/postgres.dart';
import 'fruit.dart';
import 'fruit.orm.g.dart';

class FruitService extends Service {
  final PostgreSQLConnection connection;

  final bool allowRemoveAll;

  final bool allowQuery;

  FruitService(this.connection,
      {this.allowRemoveAll: false, this.allowQuery: false});

  FruitQuery buildQuery(Map params) {
    var query = new FruitQuery();
    if (params['query'] is Map) {
      query.where.id.equals(params['query']['id']);
      query.where.treeId.equals(params['query']['tree_id']);
      query.where.commonName.equals(params['query']['common_name']);
      query.where.createdAt.equals(params['query']['created_at'] is String
          ? DateTime.parse(params['query']['created_at'])
          : params['query']['created_at'] != null
              ? params['query']['created_at'] is String
                  ? DateTime.parse(params['query']['created_at'])
                  : params['query']['created_at']
              : new DateTime.now());
      query.where.updatedAt.equals(params['query']['updated_at'] is String
          ? DateTime.parse(params['query']['updated_at'])
          : params['query']['updated_at'] != null
              ? params['query']['updated_at'] is String
                  ? DateTime.parse(params['query']['updated_at'])
                  : params['query']['updated_at']
              : new DateTime.now());
    }
    return query;
  }

  int toId(id) {
    if (id is int) {
      return id;
    } else {
      if (id == 'null' || id == null) {
        return null;
      } else {
        return int.parse(id.toString());
      }
    }
  }

  Fruit applyData(data) {
    if (data is Fruit || data == null) {
      return data;
    }
    if (data is Map) {
      var query = new Fruit();
      if (data.containsKey('tree_id')) {
        query.treeId = data['tree_id'];
      }
      if (data.containsKey('common_name')) {
        query.commonName = data['common_name'];
      }
      if (data.containsKey('created_at')) {
        query.createdAt = data['created_at'] is String
            ? DateTime.parse(data['created_at'])
            : data['created_at'] != null
                ? data['created_at'] is String
                    ? DateTime.parse(data['created_at'])
                    : data['created_at']
                : new DateTime.now();
      }
      if (data.containsKey('updated_at')) {
        query.updatedAt = data['updated_at'] is String
            ? DateTime.parse(data['updated_at'])
            : data['updated_at'] != null
                ? data['updated_at'] is String
                    ? DateTime.parse(data['updated_at'])
                    : data['updated_at']
                : new DateTime.now();
      }
      return query;
    } else
      throw new AngelHttpException.badRequest(message: 'Invalid data.');
  }

  Future<List<Fruit>> index([Map params]) {
    return buildQuery(params).get(connection).toList();
  }

  Future<Fruit> create(data, [Map params]) {
    return FruitQuery.insertFruit(connection, applyData(data));
  }

  Future<Fruit> read(id, [Map params]) {
    var query = buildQuery(params);
    query.where.id.equals(toId(id));
    return query.get(connection).first.catchError((_) {
      new AngelHttpException.notFound(
          message: 'No record found for ID ' + id.toString());
    });
  }

  Future<Fruit> remove(id, [Map params]) {
    var query = buildQuery(params);
    query.where.id.equals(toId(id));
    return query.delete(connection).first.catchError((_) {
      new AngelHttpException.notFound(
          message: 'No record found for ID ' + id.toString());
    });
  }

  Future<Fruit> update(id, data, [Map params]) {
    return FruitQuery.updateFruit(connection, applyData(data));
  }

  Future<Fruit> modify(id, data, [Map params]) async {
    var query = await read(toId(id), params);
    if (data is Fruit) {
      query = data;
    }
    if (data is Map) {
      if (data.containsKey('tree_id')) {
        query.treeId = data['tree_id'];
      }
      if (data.containsKey('common_name')) {
        query.commonName = data['common_name'];
      }
      if (data.containsKey('created_at')) {
        query.createdAt = data['created_at'] is String
            ? DateTime.parse(data['created_at'])
            : data['created_at'] != null
                ? data['created_at'] is String
                    ? DateTime.parse(data['created_at'])
                    : data['created_at']
                : new DateTime.now();
      }
      if (data.containsKey('updated_at')) {
        query.updatedAt = data['updated_at'] is String
            ? DateTime.parse(data['updated_at'])
            : data['updated_at'] != null
                ? data['updated_at'] is String
                    ? DateTime.parse(data['updated_at'])
                    : data['updated_at']
                : new DateTime.now();
      }
    }
    return await FruitQuery.updateFruit(connection, query);
  }
}
8  angel_orm_generator/test/models/fruit.up.g.sql  Normal file
@@ -0,0 +1,8 @@
CREATE TEMPORARY TABLE "fruits" (
  "id" serial,
  "tree_id" int,
  "common_name" varchar,
  "created_at" timestamp,
  "updated_at" timestamp,
  PRIMARY KEY(id)
);
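A hedged sketch of applying the generated schema script by hand (the file path relative to the working directory and the connection details are assumptions; fruit.down.g.sql would drop the table again):

import 'dart:io';
import 'package:postgres/postgres.dart';

main() async {
  var connection = new PostgreSQLConnection('localhost', 5432, 'test');
  await connection.open();

  // Read the generated "up" script and execute it against the database.
  var sql = await new File('test/models/fruit.up.g.sql').readAsString();
  await connection.execute(sql);
}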
@@ -4,7 +4,6 @@ part of angel_orm_generator.test.models.leg;
 
 // **************************************************************************
 // Generator: JsonModelGenerator
-// Target: class _Leg
 // **************************************************************************
 
 class Leg extends _Leg {
@@ -12,7 +11,7 @@ class Leg extends _Leg {
   String id;
 
   @override
-  dynamic foot;
+  Foot foot;
 
   @override
   String name;
@@ -28,7 +27,11 @@ class Leg extends _Leg {
   factory Leg.fromJson(Map data) {
     return new Leg(
         id: data['id'],
-        foot: data['foot'],
+        foot: data['foot'] == null
+            ? null
+            : (data['foot'] is Foot
+                ? data['foot']
+                : new Foot.fromJson(data['foot'])),
         name: data['name'],
         createdAt: data['created_at'] is DateTime
             ? data['created_at']
@@ -1,8 +1,7 @@
 // GENERATED CODE - DO NOT MODIFY BY HAND
 
 // **************************************************************************
-// Generator: PostgresORMGenerator
-// Target: class _Leg
+// Generator: PostgresOrmGenerator
 // **************************************************************************
 
 import 'dart:async';
150  angel_orm_generator/test/models/leg.service.g.dart  Normal file
@@ -0,0 +1,150 @@
// GENERATED CODE - DO NOT MODIFY BY HAND

// **************************************************************************
// Generator: PostgresServiceGenerator
// **************************************************************************

import 'dart:async';
import 'package:angel_framework/angel_framework.dart';
import 'package:postgres/postgres.dart';
import 'leg.dart';
import 'leg.orm.g.dart';

class LegService extends Service {
  final PostgreSQLConnection connection;

  final bool allowRemoveAll;

  final bool allowQuery;

  LegService(this.connection,
      {this.allowRemoveAll: false, this.allowQuery: false});

  LegQuery buildQuery(Map params) {
    var query = new LegQuery();
    if (params['query'] is Map) {
      query.where.id.equals(params['query']['id']);
      query.where.name.equals(params['query']['name']);
      query.where.createdAt.equals(params['query']['created_at'] is String
          ? DateTime.parse(params['query']['created_at'])
          : params['query']['created_at'] != null
              ? params['query']['created_at'] is String
                  ? DateTime.parse(params['query']['created_at'])
                  : params['query']['created_at']
              : new DateTime.now());
      query.where.updatedAt.equals(params['query']['updated_at'] is String
          ? DateTime.parse(params['query']['updated_at'])
          : params['query']['updated_at'] != null
              ? params['query']['updated_at'] is String
                  ? DateTime.parse(params['query']['updated_at'])
                  : params['query']['updated_at']
              : new DateTime.now());
    }
    return query;
  }

  int toId(id) {
    if (id is int) {
      return id;
    } else {
      if (id == 'null' || id == null) {
        return null;
      } else {
        return int.parse(id.toString());
      }
    }
  }

  Leg applyData(data) {
    if (data is Leg || data == null) {
      return data;
    }
    if (data is Map) {
      var query = new Leg();
      if (data.containsKey('name')) {
        query.name = data['name'];
      }
      if (data.containsKey('created_at')) {
        query.createdAt = data['created_at'] is String
            ? DateTime.parse(data['created_at'])
            : data['created_at'] != null
                ? data['created_at'] is String
                    ? DateTime.parse(data['created_at'])
                    : data['created_at']
                : new DateTime.now();
      }
      if (data.containsKey('updated_at')) {
        query.updatedAt = data['updated_at'] is String
            ? DateTime.parse(data['updated_at'])
            : data['updated_at'] != null
                ? data['updated_at'] is String
                    ? DateTime.parse(data['updated_at'])
                    : data['updated_at']
                : new DateTime.now();
      }
      return query;
    } else
      throw new AngelHttpException.badRequest(message: 'Invalid data.');
  }

  Future<List<Leg>> index([Map params]) {
    return buildQuery(params).get(connection).toList();
  }

  Future<Leg> create(data, [Map params]) {
    return LegQuery.insertLeg(connection, applyData(data));
  }

  Future<Leg> read(id, [Map params]) {
    var query = buildQuery(params);
    query.where.id.equals(toId(id));
    return query.get(connection).first.catchError((_) {
      new AngelHttpException.notFound(
          message: 'No record found for ID ' + id.toString());
    });
  }

  Future<Leg> remove(id, [Map params]) {
    var query = buildQuery(params);
    query.where.id.equals(toId(id));
    return query.delete(connection).first.catchError((_) {
      new AngelHttpException.notFound(
          message: 'No record found for ID ' + id.toString());
    });
  }

  Future<Leg> update(id, data, [Map params]) {
    return LegQuery.updateLeg(connection, applyData(data));
  }

  Future<Leg> modify(id, data, [Map params]) async {
    var query = await read(toId(id), params);
    if (data is Leg) {
      query = data;
    }
    if (data is Map) {
      if (data.containsKey('name')) {
        query.name = data['name'];
      }
      if (data.containsKey('created_at')) {
        query.createdAt = data['created_at'] is String
            ? DateTime.parse(data['created_at'])
            : data['created_at'] != null
                ? data['created_at'] is String
                    ? DateTime.parse(data['created_at'])
                    : data['created_at']
                : new DateTime.now();
      }
      if (data.containsKey('updated_at')) {
        query.updatedAt = data['updated_at'] is String
            ? DateTime.parse(data['updated_at'])
            : data['updated_at'] != null
                ? data['updated_at'] is String
                    ? DateTime.parse(data['updated_at'])
                    : data['updated_at']
                : new DateTime.now();
      }
    }
    return await LegQuery.updateLeg(connection, query);
  }
}
@@ -4,7 +4,6 @@ part of angel_orm_generator.test.models.role;
 
 // **************************************************************************
 // Generator: JsonModelGenerator
-// Target: class _Role
 // **************************************************************************
 
 class Role extends _Role {
@@ -1,8 +1,7 @@
 // GENERATED CODE - DO NOT MODIFY BY HAND
 
 // **************************************************************************
-// Generator: PostgresORMGenerator
-// Target: class _Role
+// Generator: PostgresOrmGenerator
 // **************************************************************************
 
 import 'dart:async';
150  angel_orm_generator/test/models/role.service.g.dart  Normal file
@@ -0,0 +1,150 @@
// GENERATED CODE - DO NOT MODIFY BY HAND

// **************************************************************************
// Generator: PostgresServiceGenerator
// **************************************************************************

import 'dart:async';
import 'package:angel_framework/angel_framework.dart';
import 'package:postgres/postgres.dart';
import 'role.dart';
import 'role.orm.g.dart';

class RoleService extends Service {
  final PostgreSQLConnection connection;

  final bool allowRemoveAll;

  final bool allowQuery;

  RoleService(this.connection,
      {this.allowRemoveAll: false, this.allowQuery: false});

  RoleQuery buildQuery(Map params) {
    var query = new RoleQuery();
    if (params['query'] is Map) {
      query.where.id.equals(params['query']['id']);
      query.where.name.equals(params['query']['name']);
      query.where.createdAt.equals(params['query']['created_at'] is String
          ? DateTime.parse(params['query']['created_at'])
          : params['query']['created_at'] != null
              ? params['query']['created_at'] is String
                  ? DateTime.parse(params['query']['created_at'])
                  : params['query']['created_at']
              : new DateTime.now());
      query.where.updatedAt.equals(params['query']['updated_at'] is String
          ? DateTime.parse(params['query']['updated_at'])
          : params['query']['updated_at'] != null
              ? params['query']['updated_at'] is String
                  ? DateTime.parse(params['query']['updated_at'])
                  : params['query']['updated_at']
              : new DateTime.now());
    }
    return query;
  }

  int toId(id) {
    if (id is int) {
      return id;
    } else {
      if (id == 'null' || id == null) {
        return null;
      } else {
        return int.parse(id.toString());
      }
    }
  }

  Role applyData(data) {
    if (data is Role || data == null) {
      return data;
    }
    if (data is Map) {
      var query = new Role();
      if (data.containsKey('name')) {
        query.name = data['name'];
      }
      if (data.containsKey('created_at')) {
        query.createdAt = data['created_at'] is String
            ? DateTime.parse(data['created_at'])
            : data['created_at'] != null
                ? data['created_at'] is String
                    ? DateTime.parse(data['created_at'])
                    : data['created_at']
                : new DateTime.now();
      }
      if (data.containsKey('updated_at')) {
        query.updatedAt = data['updated_at'] is String
            ? DateTime.parse(data['updated_at'])
            : data['updated_at'] != null
                ? data['updated_at'] is String
                    ? DateTime.parse(data['updated_at'])
                    : data['updated_at']
                : new DateTime.now();
      }
      return query;
    } else
      throw new AngelHttpException.badRequest(message: 'Invalid data.');
  }

  Future<List<Role>> index([Map params]) {
    return buildQuery(params).get(connection).toList();
  }

  Future<Role> create(data, [Map params]) {
    return RoleQuery.insertRole(connection, applyData(data));
  }

  Future<Role> read(id, [Map params]) {
    var query = buildQuery(params);
    query.where.id.equals(toId(id));
    return query.get(connection).first.catchError((_) {
      new AngelHttpException.notFound(
          message: 'No record found for ID ' + id.toString());
    });
  }

  Future<Role> remove(id, [Map params]) {
    var query = buildQuery(params);
    query.where.id.equals(toId(id));
    return query.delete(connection).first.catchError((_) {
      new AngelHttpException.notFound(
          message: 'No record found for ID ' + id.toString());
    });
  }

  Future<Role> update(id, data, [Map params]) {
    return RoleQuery.updateRole(connection, applyData(data));
  }

  Future<Role> modify(id, data, [Map params]) async {
    var query = await read(toId(id), params);
    if (data is Role) {
      query = data;
    }
    if (data is Map) {
      if (data.containsKey('name')) {
        query.name = data['name'];
      }
      if (data.containsKey('created_at')) {
        query.createdAt = data['created_at'] is String
            ? DateTime.parse(data['created_at'])
            : data['created_at'] != null
                ? data['created_at'] is String
                    ? DateTime.parse(data['created_at'])
                    : data['created_at']
                : new DateTime.now();
      }
      if (data.containsKey('updated_at')) {
        query.updatedAt = data['updated_at'] is String
            ? DateTime.parse(data['updated_at'])
            : data['updated_at'] != null
                ? data['updated_at'] is String
                    ? DateTime.parse(data['updated_at'])
                    : data['updated_at']
                : new DateTime.now();
      }
    }
    return await RoleQuery.updateRole(connection, query);
  }
}
17  angel_orm_generator/test/models/tree.dart  Normal file
@@ -0,0 +1,17 @@
library angel_orm_generator.test.models.tree;

import 'package:angel_model/angel_model.dart';
import 'package:angel_orm/angel_orm.dart';
import 'package:angel_serialize/angel_serialize.dart';
import 'fruit.dart';
part 'tree.g.dart';

@serializable
@orm
class _Tree extends Model {
  @Column(index: IndexType.UNIQUE, type: ColumnType.SMALL_INT)
  int rings;

  @hasMany
  List<Fruit> fruits;
}
1  angel_orm_generator/test/models/tree.down.g.sql  Normal file
@@ -0,0 +1 @@
DROP TABLE "trees";
62  angel_orm_generator/test/models/tree.g.dart  Normal file
@@ -0,0 +1,62 @@
// GENERATED CODE - DO NOT MODIFY BY HAND

part of angel_orm_generator.test.models.tree;

// **************************************************************************
// Generator: JsonModelGenerator
// **************************************************************************

class Tree extends _Tree {
  @override
  String id;

  @override
  int rings;

  @override
  List<Fruit> fruits;

  @override
  DateTime createdAt;

  @override
  DateTime updatedAt;

  Tree({this.id, this.rings, this.fruits, this.createdAt, this.updatedAt});

  factory Tree.fromJson(Map data) {
    return new Tree(
        id: data['id'],
        rings: data['rings'],
        fruits: data['fruits'] is List
            ? data['fruits']
                .map((x) =>
                    x == null ? null : (x is Fruit ? x : new Fruit.fromJson(x)))
                .toList()
            : null,
        createdAt: data['created_at'] is DateTime
            ? data['created_at']
            : (data['created_at'] is String
                ? DateTime.parse(data['created_at'])
                : null),
        updatedAt: data['updated_at'] is DateTime
            ? data['updated_at']
            : (data['updated_at'] is String
                ? DateTime.parse(data['updated_at'])
                : null));
  }

  Map<String, dynamic> toJson() => {
        'id': id,
        'rings': rings,
        'fruits': fruits,
        'created_at': createdAt == null ? null : createdAt.toIso8601String(),
        'updated_at': updatedAt == null ? null : updatedAt.toIso8601String()
      };

  static Tree parse(Map map) => new Tree.fromJson(map);

  Tree clone() {
    return new Tree.fromJson(toJson());
  }
}
262  angel_orm_generator/test/models/tree.orm.g.dart  Normal file
@@ -0,0 +1,262 @@
// GENERATED CODE - DO NOT MODIFY BY HAND

// **************************************************************************
// Generator: PostgresOrmGenerator
// **************************************************************************

import 'dart:async';
import 'package:angel_orm/angel_orm.dart';
import 'package:postgres/postgres.dart';
import 'tree.dart';
import 'fruit.orm.g.dart';

class TreeQuery {
  final Map<TreeQuery, bool> _unions = {};

  String _sortKey;

  String _sortMode;

  int limit;

  int offset;

  final List<TreeQueryWhere> _or = [];

  final TreeQueryWhere where = new TreeQueryWhere();

  void union(TreeQuery query) {
    _unions[query] = false;
  }

  void unionAll(TreeQuery query) {
    _unions[query] = true;
  }

  void sortDescending(String key) {
    _sortMode = 'Descending';
    _sortKey = ('trees.' + key);
  }

  void sortAscending(String key) {
    _sortMode = 'Ascending';
    _sortKey = ('trees.' + key);
  }

  void or(TreeQueryWhere selector) {
    _or.add(selector);
  }

  String toSql([String prefix]) {
    var buf = new StringBuffer();
    buf.write(prefix != null
        ? prefix
        : 'SELECT trees.id, trees.rings, trees.created_at, trees.updated_at FROM "trees"');
    if (prefix == null) {}
    var whereClause = where.toWhereClause();
    if (whereClause != null) {
      buf.write(' ' + whereClause);
    }
    _or.forEach((x) {
      var whereClause = x.toWhereClause(keyword: false);
      if (whereClause != null) {
        buf.write(' OR (' + whereClause + ')');
      }
    });
    if (prefix == null) {
      if (limit != null) {
        buf.write(' LIMIT ' + limit.toString());
      }
      if (offset != null) {
        buf.write(' OFFSET ' + offset.toString());
      }
      if (_sortMode == 'Descending') {
        buf.write(' ORDER BY "' + _sortKey + '" DESC');
      }
      if (_sortMode == 'Ascending') {
        buf.write(' ORDER BY "' + _sortKey + '" ASC');
      }
      _unions.forEach((query, all) {
        buf.write(' UNION');
        if (all) {
          buf.write(' ALL');
        }
        buf.write(' (');
        var sql = query.toSql().replaceAll(';', '');
        buf.write(sql + ')');
      });
      buf.write(';');
    }
    return buf.toString();
  }

  static Tree parseRow(List row) {
    var result = new Tree.fromJson({
      'id': row[0].toString(),
      'rings': row[1],
      'created_at': row[2],
      'updated_at': row[3]
    });
    if (row.length > 4) {
      result.fruits =
          FruitQuery.parseRow([row[4], row[5], row[6], row[7], row[8]]);
    }
    return result;
  }

  Stream<Tree> get(PostgreSQLConnection connection) {
    StreamController<Tree> ctrl = new StreamController<Tree>();
    connection.query(toSql()).then((rows) async {
      var futures = rows.map((row) async {
        var parsed = parseRow(row);
        var fruitQuery = new FruitQuery();
        fruitQuery.where.treeId.equals(row[0]);
        parsed.fruits =
            await fruitQuery.get(connection).toList().catchError((_) => []);
        return parsed;
      });
      var output = await Future.wait(futures);
      output.forEach(ctrl.add);
      ctrl.close();
    }).catchError(ctrl.addError);
    return ctrl.stream;
  }

  static Future<Tree> getOne(int id, PostgreSQLConnection connection) {
    var query = new TreeQuery();
    query.where.id.equals(id);
    return query.get(connection).first.catchError((_) => null);
  }

  Stream<Tree> update(PostgreSQLConnection connection,
      {int rings, DateTime createdAt, DateTime updatedAt}) {
    var buf = new StringBuffer(
        'UPDATE "trees" SET ("rings", "created_at", "updated_at") = (@rings, @createdAt, @updatedAt) ');
    var whereClause = where.toWhereClause();
    if (whereClause != null) {
      buf.write(whereClause);
    }
    var __ormNow__ = new DateTime.now();
    var ctrl = new StreamController<Tree>();
    connection.query(
        buf.toString() +
            ' RETURNING "id", "rings", "created_at", "updated_at";',
        substitutionValues: {
          'rings': rings,
          'createdAt': createdAt != null ? createdAt : __ormNow__,
          'updatedAt': updatedAt != null ? updatedAt : __ormNow__
        }).then((rows) async {
      var futures = rows.map((row) async {
        var parsed = parseRow(row);
        var fruitQuery = new FruitQuery();
        fruitQuery.where.treeId.equals(row[0]);
        parsed.fruits =
            await fruitQuery.get(connection).toList().catchError((_) => []);
        return parsed;
      });
      var output = await Future.wait(futures);
      output.forEach(ctrl.add);
      ctrl.close();
    }).catchError(ctrl.addError);
    return ctrl.stream;
  }

  Stream<Tree> delete(PostgreSQLConnection connection) {
    StreamController<Tree> ctrl = new StreamController<Tree>();
    connection
        .query(toSql('DELETE FROM "trees"') +
            ' RETURNING "id", "rings", "created_at", "updated_at";')
        .then((rows) async {
      var futures = rows.map((row) async {
        var parsed = parseRow(row);
        var fruitQuery = new FruitQuery();
        fruitQuery.where.treeId.equals(row[0]);
        parsed.fruits =
            await fruitQuery.get(connection).toList().catchError((_) => []);
        return parsed;
      });
      var output = await Future.wait(futures);
      output.forEach(ctrl.add);
      ctrl.close();
    }).catchError(ctrl.addError);
    return ctrl.stream;
  }

  static Future<Tree> deleteOne(int id, PostgreSQLConnection connection) {
    var query = new TreeQuery();
    query.where.id.equals(id);
    return query.delete(connection).first;
  }

  static Future<Tree> insert(PostgreSQLConnection connection,
      {int rings, DateTime createdAt, DateTime updatedAt}) async {
    var __ormNow__ = new DateTime.now();
    var result = await connection.query(
        'INSERT INTO "trees" ("rings", "created_at", "updated_at") VALUES (@rings, @createdAt, @updatedAt) RETURNING "id", "rings", "created_at", "updated_at";',
        substitutionValues: {
          'rings': rings,
          'createdAt': createdAt != null ? createdAt : __ormNow__,
          'updatedAt': updatedAt != null ? updatedAt : __ormNow__
        });
    var output = parseRow(result[0]);
    var fruitQuery = new FruitQuery();
    fruitQuery.where.treeId.equals(result[0][0]);
    output.fruits =
        await fruitQuery.get(connection).toList().catchError((_) => []);
    return output;
  }

  static Future<Tree> insertTree(PostgreSQLConnection connection, Tree tree) {
    return TreeQuery.insert(connection,
        rings: tree.rings,
        createdAt: tree.createdAt,
        updatedAt: tree.updatedAt);
  }

  static Future<Tree> updateTree(PostgreSQLConnection connection, Tree tree) {
    var query = new TreeQuery();
    query.where.id.equals(int.parse(tree.id));
    return query
        .update(connection,
            rings: tree.rings,
            createdAt: tree.createdAt,
            updatedAt: tree.updatedAt)
        .first;
  }

  static Stream<Tree> getAll(PostgreSQLConnection connection) =>
      new TreeQuery().get(connection);
}

class TreeQueryWhere {
  final NumericSqlExpressionBuilder<int> id =
      new NumericSqlExpressionBuilder<int>();

  final NumericSqlExpressionBuilder<int> rings =
      new NumericSqlExpressionBuilder<int>();

  final DateTimeSqlExpressionBuilder createdAt =
      new DateTimeSqlExpressionBuilder('trees.created_at');

  final DateTimeSqlExpressionBuilder updatedAt =
      new DateTimeSqlExpressionBuilder('trees.updated_at');

  String toWhereClause({bool keyword}) {
    final List<String> expressions = [];
    if (id.hasValue) {
      expressions.add('trees.id ' + id.compile());
    }
    if (rings.hasValue) {
      expressions.add('trees.rings ' + rings.compile());
    }
    if (createdAt.hasValue) {
      expressions.add(createdAt.compile());
    }
    if (updatedAt.hasValue) {
      expressions.add(updatedAt.compile());
    }
    return expressions.isEmpty
        ? null
        : ((keyword != false ? 'WHERE ' : '') + expressions.join(' AND '));
  }
}
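The @hasMany relation above is populated by a follow-up FruitQuery filtered on tree_id for each returned row, not by a JOIN. A minimal caller-side sketch (an already-open PostgreSQLConnection named connection is assumed):

// Fetch one tree by ID; getOne resolves to null if no row matches.
var tree = await TreeQuery.getOne(1, connection);
if (tree != null) {
  // fruits was filled in by the extra per-row query.
  print('${tree.rings} rings, ${tree.fruits.length} fruits');
}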
150
angel_orm_generator/test/models/tree.service.g.dart
Normal file
150
angel_orm_generator/test/models/tree.service.g.dart
Normal file
|
@ -0,0 +1,150 @@
|
||||||
|
// GENERATED CODE - DO NOT MODIFY BY HAND

// **************************************************************************
// Generator: PostgresServiceGenerator
// **************************************************************************

import 'dart:async';
import 'package:angel_framework/angel_framework.dart';
import 'package:postgres/postgres.dart';
import 'tree.dart';
import 'tree.orm.g.dart';

class TreeService extends Service {
  final PostgreSQLConnection connection;

  final bool allowRemoveAll;

  final bool allowQuery;

  TreeService(this.connection,
      {this.allowRemoveAll: false, this.allowQuery: false});

  TreeQuery buildQuery(Map params) {
    var query = new TreeQuery();
    if (params['query'] is Map) {
      query.where.id.equals(params['query']['id']);
      query.where.rings.equals(params['query']['rings']);
      query.where.createdAt.equals(params['query']['created_at'] is String
          ? DateTime.parse(params['query']['created_at'])
          : params['query']['created_at'] != null
              ? params['query']['created_at'] is String
                  ? DateTime.parse(params['query']['created_at'])
                  : params['query']['created_at']
              : new DateTime.now());
      query.where.updatedAt.equals(params['query']['updated_at'] is String
          ? DateTime.parse(params['query']['updated_at'])
          : params['query']['updated_at'] != null
              ? params['query']['updated_at'] is String
                  ? DateTime.parse(params['query']['updated_at'])
                  : params['query']['updated_at']
              : new DateTime.now());
    }
    return query;
  }

  int toId(id) {
    if (id is int) {
      return id;
    } else {
      if (id == 'null' || id == null) {
        return null;
      } else {
        return int.parse(id.toString());
      }
    }
  }

  Tree applyData(data) {
    if (data is Tree || data == null) {
      return data;
    }
    if (data is Map) {
      var query = new Tree();
      if (data.containsKey('rings')) {
        query.rings = data['rings'];
      }
      if (data.containsKey('created_at')) {
        query.createdAt = data['created_at'] is String
            ? DateTime.parse(data['created_at'])
            : data['created_at'] != null
                ? data['created_at'] is String
                    ? DateTime.parse(data['created_at'])
                    : data['created_at']
                : new DateTime.now();
      }
      if (data.containsKey('updated_at')) {
        query.updatedAt = data['updated_at'] is String
            ? DateTime.parse(data['updated_at'])
            : data['updated_at'] != null
                ? data['updated_at'] is String
                    ? DateTime.parse(data['updated_at'])
                    : data['updated_at']
                : new DateTime.now();
      }
      return query;
    } else
      throw new AngelHttpException.badRequest(message: 'Invalid data.');
  }

  Future<List<Tree>> index([Map params]) {
    return buildQuery(params).get(connection).toList();
  }

  Future<Tree> create(data, [Map params]) {
    return TreeQuery.insertTree(connection, applyData(data));
  }

  Future<Tree> read(id, [Map params]) {
    var query = buildQuery(params);
    query.where.id.equals(toId(id));
    return query.get(connection).first.catchError((_) {
      new AngelHttpException.notFound(
          message: 'No record found for ID ' + id.toString());
    });
  }

  Future<Tree> remove(id, [Map params]) {
    var query = buildQuery(params);
    query.where.id.equals(toId(id));
    return query.delete(connection).first.catchError((_) {
      new AngelHttpException.notFound(
          message: 'No record found for ID ' + id.toString());
    });
  }

  Future<Tree> update(id, data, [Map params]) {
    return TreeQuery.updateTree(connection, applyData(data));
  }

  Future<Tree> modify(id, data, [Map params]) async {
    var query = await read(toId(id), params);
    if (data is Tree) {
      query = data;
    }
    if (data is Map) {
      if (data.containsKey('rings')) {
        query.rings = data['rings'];
      }
      if (data.containsKey('created_at')) {
        query.createdAt = data['created_at'] is String
            ? DateTime.parse(data['created_at'])
            : data['created_at'] != null
                ? data['created_at'] is String
                    ? DateTime.parse(data['created_at'])
                    : data['created_at']
                : new DateTime.now();
      }
      if (data.containsKey('updated_at')) {
        query.updatedAt = data['updated_at'] is String
            ? DateTime.parse(data['updated_at'])
            : data['updated_at'] != null
                ? data['updated_at'] is String
                    ? DateTime.parse(data['updated_at'])
                    : data['updated_at']
                : new DateTime.now();
      }
    }
    return await TreeQuery.updateTree(connection, query);
  }
}
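A minimal wiring sketch (assumed, not part of this commit) for mounting the generated service on an Angel 1.x app; host, port, and database name are placeholders, and starting the HTTP server is omitted.

import 'package:angel_framework/angel_framework.dart';
import 'package:postgres/postgres.dart';
import 'tree.service.g.dart';

main() async {
  // Placeholder connection settings.
  var connection = new PostgreSQLConnection('localhost', 5432, 'angel_orm_test');
  await connection.open();

  var app = new Angel();
  // Exposes index/create/read/modify/update/remove for Tree rows.
  app.use('/api/trees', new TreeService(connection, allowQuery: true));
}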
8 angel_orm_generator/test/models/tree.up.g.sql Normal file

@ -0,0 +1,8 @@
CREATE TEMPORARY TABLE "trees" (
  "id" serial,
  "rings" smallint UNIQUE,
  "created_at" timestamp,
  "updated_at" timestamp,
  UNIQUE(rings),
  PRIMARY KEY(id)
);
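The generated migration can be applied with `package:postgres` before running tests; a sketch, assuming a local database (connection settings are placeholders).

import 'dart:io';
import 'package:postgres/postgres.dart';

main() async {
  var connection = new PostgreSQLConnection('localhost', 5432, 'angel_orm_test');
  await connection.open();
  // TEMPORARY tables disappear when the session ends.
  await connection.execute(
      await new File('test/models/tree.up.g.sql').readAsString());
}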
@ -4,7 +4,6 @@ part of angel_orm_generator.test.models.user;

 // **************************************************************************
 // Generator: JsonModelGenerator
-// Target: class _User
 // **************************************************************************

 class User extends _User {

@ -21,7 +20,7 @@ class User extends _User {
   String email;

   @override
-  List roles;
+  List<Role> roles;

   @override
   DateTime createdAt;

@ -44,7 +43,12 @@ class User extends _User {
         username: data['username'],
         password: data['password'],
         email: data['email'],
-        roles: data['roles'],
+        roles: data['roles'] is List
+            ? data['roles']
+                .map((x) =>
+                    x == null ? null : (x is Role ? x : new Role.fromJson(x)))
+                .toList()
+            : null,
         createdAt: data['created_at'] is DateTime
             ? data['created_at']
             : (data['created_at'] is String
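A small illustration (assumed; the generated `User.fromJson` constructor is not shown in full here) of what the `roles` change enables: nested maps now deserialize into `Role` instances instead of staying raw.

import 'role.dart';
import 'user.dart';

main() {
  var user = new User.fromJson({
    'username': 'bob',
    'roles': [
      {'name': 'admin'}
    ]
  });
  print(user.roles.first is Role); // true
}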
@ -1,8 +1,7 @@
 // GENERATED CODE - DO NOT MODIFY BY HAND

 // **************************************************************************
-// Generator: PostgresORMGenerator
-// Target: class _User
+// Generator: PostgresOrmGenerator
 // **************************************************************************

 import 'dart:async';

@ -52,7 +51,7 @@ class UserQuery {
     var buf = new StringBuffer();
     buf.write(prefix != null
         ? prefix
-        : 'SELECT users.id, users.username, users.password, users.email, users.created_at, users.updated_at, roles.id, roles.name, roles.created_at, roles.updated_at FROM "users"');
+        : 'SELECT users.id, users.username, users.password, users.email, users.created_at, users.updated_at FROM "users"');
     if (prefix == null) {}
     var whereClause = where.toWhereClause();
     if (whereClause != null) {
164 angel_orm_generator/test/models/user.service.g.dart Normal file

@ -0,0 +1,164 @@
// GENERATED CODE - DO NOT MODIFY BY HAND

// **************************************************************************
// Generator: PostgresServiceGenerator
// **************************************************************************

import 'dart:async';
import 'package:angel_framework/angel_framework.dart';
import 'package:postgres/postgres.dart';
import 'user.dart';
import 'user.orm.g.dart';

class UserService extends Service {
  final PostgreSQLConnection connection;

  final bool allowRemoveAll;

  final bool allowQuery;

  UserService(this.connection,
      {this.allowRemoveAll: false, this.allowQuery: false});

  UserQuery buildQuery(Map params) {
    var query = new UserQuery();
    if (params['query'] is Map) {
      query.where.id.equals(params['query']['id']);
      query.where.username.equals(params['query']['username']);
      query.where.password.equals(params['query']['password']);
      query.where.email.equals(params['query']['email']);
      query.where.createdAt.equals(params['query']['created_at'] is String
          ? DateTime.parse(params['query']['created_at'])
          : params['query']['created_at'] != null
              ? params['query']['created_at'] is String
                  ? DateTime.parse(params['query']['created_at'])
                  : params['query']['created_at']
              : new DateTime.now());
      query.where.updatedAt.equals(params['query']['updated_at'] is String
          ? DateTime.parse(params['query']['updated_at'])
          : params['query']['updated_at'] != null
              ? params['query']['updated_at'] is String
                  ? DateTime.parse(params['query']['updated_at'])
                  : params['query']['updated_at']
              : new DateTime.now());
    }
    return query;
  }

  int toId(id) {
    if (id is int) {
      return id;
    } else {
      if (id == 'null' || id == null) {
        return null;
      } else {
        return int.parse(id.toString());
      }
    }
  }

  User applyData(data) {
    if (data is User || data == null) {
      return data;
    }
    if (data is Map) {
      var query = new User();
      if (data.containsKey('username')) {
        query.username = data['username'];
      }
      if (data.containsKey('password')) {
        query.password = data['password'];
      }
      if (data.containsKey('email')) {
        query.email = data['email'];
      }
      if (data.containsKey('created_at')) {
        query.createdAt = data['created_at'] is String
            ? DateTime.parse(data['created_at'])
            : data['created_at'] != null
                ? data['created_at'] is String
                    ? DateTime.parse(data['created_at'])
                    : data['created_at']
                : new DateTime.now();
      }
      if (data.containsKey('updated_at')) {
        query.updatedAt = data['updated_at'] is String
            ? DateTime.parse(data['updated_at'])
            : data['updated_at'] != null
                ? data['updated_at'] is String
                    ? DateTime.parse(data['updated_at'])
                    : data['updated_at']
                : new DateTime.now();
      }
      return query;
    } else
      throw new AngelHttpException.badRequest(message: 'Invalid data.');
  }

  Future<List<User>> index([Map params]) {
    return buildQuery(params).get(connection).toList();
  }

  Future<User> create(data, [Map params]) {
    return UserQuery.insertUser(connection, applyData(data));
  }

  Future<User> read(id, [Map params]) {
    var query = buildQuery(params);
    query.where.id.equals(toId(id));
    return query.get(connection).first.catchError((_) {
      new AngelHttpException.notFound(
          message: 'No record found for ID ' + id.toString());
    });
  }

  Future<User> remove(id, [Map params]) {
    var query = buildQuery(params);
    query.where.id.equals(toId(id));
    return query.delete(connection).first.catchError((_) {
      new AngelHttpException.notFound(
          message: 'No record found for ID ' + id.toString());
    });
  }

  Future<User> update(id, data, [Map params]) {
    return UserQuery.updateUser(connection, applyData(data));
  }

  Future<User> modify(id, data, [Map params]) async {
    var query = await read(toId(id), params);
    if (data is User) {
      query = data;
    }
    if (data is Map) {
      if (data.containsKey('username')) {
        query.username = data['username'];
      }
      if (data.containsKey('password')) {
        query.password = data['password'];
      }
      if (data.containsKey('email')) {
        query.email = data['email'];
      }
      if (data.containsKey('created_at')) {
        query.createdAt = data['created_at'] is String
            ? DateTime.parse(data['created_at'])
            : data['created_at'] != null
                ? data['created_at'] is String
                    ? DateTime.parse(data['created_at'])
                    : data['created_at']
                : new DateTime.now();
      }
      if (data.containsKey('updated_at')) {
        query.updatedAt = data['updated_at'] is String
            ? DateTime.parse(data['updated_at'])
            : data['updated_at'] != null
                ? data['updated_at'] is String
                    ? DateTime.parse(data['updated_at'])
                    : data['updated_at']
                : new DateTime.now();
      }
    }
    return await UserQuery.updateUser(connection, query);
  }
}
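A query sketch (assumed; `connection` settings are placeholders): the generated `buildQuery` reads a nested `query` map out of the service parameters, so a filtered `index` call looks like this.

import 'package:postgres/postgres.dart';
import 'user.dart';
import 'user.service.g.dart';

main() async {
  var connection = new PostgreSQLConnection('localhost', 5432, 'angel_orm_test');
  await connection.open();

  var users = new UserService(connection, allowQuery: true);
  // Filter via the nested 'query' map that buildQuery expects.
  List<User> matches = await users.index({
    'query': {'username': 'bob'}
  });
  print(matches.length);
}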
64 angel_orm_generator/tool/actions.dart Normal file

@ -0,0 +1,64 @@
import 'package:build_runner/build_runner.dart';
import 'package:source_gen/source_gen.dart';
import 'package:angel_orm_generator/angel_orm_generator.dart';
import 'package:angel_serialize_generator/angel_serialize_generator.dart';

const String packageName = 'angel_orm_generator';
const List<String> allModels = const ['test/models/*.dart'];
const List<String> standaloneModels = const [
  'test/models/author.dart',
  'test/models/car.dart',
  'test/models/foot.dart',
  'test/models/fruit.dart',
  'test/models/role.dart'
];
const List<String> dependentModels = const [
  'test/models/book.dart',
  'test/models/leg.dart',
  'test/models/tree.dart',
  'test/models/user.dart'
];

final List<BuildAction> actions = [
  new BuildAction(
    new PartBuilder(const [const JsonModelGenerator()]),
    packageName,
    inputs: standaloneModels,
  ),
  new BuildAction(
    new PartBuilder(const [const JsonModelGenerator()]),
    packageName,
    inputs: dependentModels,
  ),
  new BuildAction(
    new LibraryBuilder(
      const PostgresOrmGenerator(),
      generatedExtension: '.orm.g.dart',
    ),
    packageName,
    inputs: standaloneModels,
  ),
  new BuildAction(
    new LibraryBuilder(
      const PostgresOrmGenerator(),
      generatedExtension: '.orm.g.dart',
    ),
    packageName,
    inputs: dependentModels,
  ),
  new BuildAction(
    new LibraryBuilder(
      const PostgresServiceGenerator(),
      generatedExtension: '.service.g.dart',
    ),
    packageName,
    inputs: allModels,
  ),
  new BuildAction(
    const SqlMigrationBuilder(
      temporary: true,
    ),
    packageName,
    inputs: allModels,
  ),
];
@ -1,4 +1,4 @@
 import 'package:build_runner/build_runner.dart';
-import 'phases.dart';
+import 'actions.dart';

-main() => build(PHASES, deleteFilesByDefault: true);
+main() => build(actions, deleteFilesByDefault: true);
@ -1,42 +0,0 @@
-import 'package:build_runner/build_runner.dart';
-import 'package:source_gen/source_gen.dart';
-import 'package:angel_orm_generator/angel_orm_generator.dart';
-import 'package:angel_serialize_generator/angel_serialize_generator.dart';
-
-final InputSet ALL_MODELS =
-    new InputSet('angel_orm_generator', const ['test/models/*.dart']);
-final InputSet STANDALONE_MODELS = new InputSet('angel_orm_generator', const [
-  'test/models/author.dart',
-  'test/models/car.dart',
-  'test/models/foot.dart',
-  'test/models/role.dart'
-]);
-final InputSet DEPENDENT_MODELS = new InputSet('angel_orm_generator', const [
-  'test/models/book.dart',
-  'test/models/leg.dart',
-  'test/models/user.dart'
-]);
-
-final PhaseGroup PHASES = new PhaseGroup()
-  ..addPhase(new Phase()
-    ..addAction(
-        new GeneratorBuilder([const JsonModelGenerator()]), STANDALONE_MODELS)
-    ..addAction(
-        new GeneratorBuilder([const JsonModelGenerator()]), DEPENDENT_MODELS))
-  ..addPhase(new Phase()
-    ..addAction(
-        new GeneratorBuilder([new PostgresORMGenerator()],
-            isStandalone: true, generatedExtension: '.orm.g.dart'),
-        STANDALONE_MODELS))
-  ..addPhase(new Phase()
-    ..addAction(
-        new GeneratorBuilder([new PostgresORMGenerator()],
-            isStandalone: true, generatedExtension: '.orm.g.dart'),
-        DEPENDENT_MODELS))
-  ..addPhase(new Phase()
-    ..addAction(
-        new GeneratorBuilder([new PostgresServiceGenerator()],
-            isStandalone: true, generatedExtension: '.service.g.dart'),
-        ALL_MODELS))
-  ..addPhase(new Phase()
-    ..addAction(new SQLMigrationGenerator(temporary: true), ALL_MODELS));
@ -1,4 +1,4 @@
 import 'package:build_runner/build_runner.dart';
-import 'phases.dart';
+import 'actions.dart';

-main() => watch(PHASES, deleteFilesByDefault: true);
+main() => watch(actions, deleteFilesByDefault: true);