Fixed ORM Generator
parent 342bdb322e
commit 630b301555
68 changed files with 2941 additions and 478 deletions
@@ -1,5 +1,9 @@
 # Change Log
 
+## 4.0.2
+
+* Updated default varchar column size to 255
+
 ## 4.0.1
 
 * Updated linter to `package:lints`
@@ -7,6 +7,8 @@
 A database migration framework built for Angel3 ORM.
 
-Supported database:
+## Supported database
 
-* postgresql version 10, 11, 12, 13 and 14
+* PostgreSQL version 10 or later
+* MySQL 8.0 or later
+* MariaDB 10.2.1 or later
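For orientation, here is a minimal sketch of a migration built on this package. It only uses calls that appear elsewhere in this commit (`schema.create`, `table.serial(...).primaryKey()`, `table.varChar(..., length: 255)`, `schema.drop`); the `UserMigration` class and the `users` table are illustrative, not part of the diff.

```dart
import 'package:angel3_migration/angel3_migration.dart';

// Hypothetical migration; the class and table names are made up.
class UserMigration extends Migration {
  @override
  void up(Schema schema) {
    schema.create('users', (table) {
      table.serial('id').primaryKey();
      // VARCHAR columns now default to a length of 255.
      table.varChar('name', length: 255);
    });
  }

  @override
  void down(Schema schema) {
    schema.drop('users');
  }
}
```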
@@ -19,7 +19,7 @@ class MigrationColumn extends Column {
   MigrationColumn(ColumnType type,
       {bool isNullable = true,
-      int length = 256,
+      int length = 255,
       IndexType indexType = IndexType.standardIndex,
       defaultValue})
       : super(type: type, length: length) {
@@ -4,7 +4,9 @@ abstract class Schema {
   void drop(String tableName, {bool cascade = false});
 
   void dropAll(Iterable<String> tableNames, {bool cascade = false}) {
-    tableNames.forEach((n) => drop(n, cascade: cascade));
+    for (var n in tableNames) {
+      drop(n, cascade: cascade);
+    }
   }
 
   void create(String tableName, void Function(Table table) callback);
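The rewritten `dropAll` keeps the same behaviour: it walks the names and forwards `cascade` to each `drop` call. A one-line usage sketch (the table names are invented):

```dart
// Drops both tables; cascade is passed through to each drop().
schema.dropAll(['todos', 'users'], cascade: true);
```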
@@ -1,5 +1,5 @@
 name: angel3_migration
-version: 4.0.1
+version: 4.0.2
 description: Database migration runtime for Angel3 ORM. Use this package to define schemas.
 homepage: https://angel3-framework.web.app/
 repository: https://github.com/dukefirehawk/angel/tree/master/packages/orm/angel_migration
@@ -1,5 +1,9 @@
 # Change Log
 
+## 4.1.0
+
+* Added `MySQL` database support
+
 ## 4.0.2
 
 * Updated README
@@ -9,4 +9,6 @@ Command-line based database migration runner for Angel3 ORM.
 
 Supported database:
 
-* postgresql version 10, 11, 12, 13 and 14
+* PostgreSQL version 10, 11, 12, 13 and 14
+* MySQL 8.0 or later
+* MariaDB 10.2.1 or later
@@ -1,13 +1,16 @@
 import 'package:angel3_migration/angel3_migration.dart';
 import 'package:angel3_migration_runner/angel3_migration_runner.dart';
 import 'package:angel3_migration_runner/postgres.dart';
+import 'package:angel3_migration_runner/mysql.dart';
 import 'package:angel3_orm/angel3_orm.dart';
 import 'package:postgres/postgres.dart';
+import 'package:mysql1/mysql1.dart';
 
 import 'todo.dart';
 
-var migrationRunner = PostgresMigrationRunner(
-  PostgreSQLConnection('127.0.0.1', 5432, 'test',
-      username: 'postgres', password: 'postgres'),
+var postgresqlMigrationRunner = PostgresMigrationRunner(
+  PostgreSQLConnection('127.0.0.1', 5432, 'demo',
+      username: 'demouser', password: 'demo123'),
   migrations: [
     UserMigration(),
     TodoMigration(),
@@ -15,7 +18,21 @@ var migrationRunner = PostgresMigrationRunner(
   ],
 );
 
-void main(List<String> args) => runMigrations(migrationRunner, args);
+var mysqlMigrationRunner = MysqlMigrationRunner(
+  ConnectionSettings(
+      host: 'localhost',
+      port: 3306,
+      user: 'demouser',
+      password: 'demo123',
+      db: 'demo'),
+  migrations: [
+    UserMigration(),
+    TodoMigration(),
+    FooMigration(),
+  ],
+);
+
+void main(List<String> args) => runMigrations(postgresqlMigrationRunner, args);
 
 class FooMigration extends Migration {
   @override
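The updated example wires up both runners, but `main` still hands the PostgreSQL one to `runMigrations`. If the intent is to exercise the new MySQL runner instead, the swap is presumably a one-liner (untested sketch):

```dart
// Hypothetical: drive the MySQL-backed runner with the same CLI arguments.
void main(List<String> args) => runMigrations(mysqlMigrationRunner, args);
```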
packages/orm/angel_migration_runner/lib/mysql.dart (new file, 3 lines)
@@ -0,0 +1,3 @@
+export 'src/mysql/runner.dart';
+export 'src/mysql/schema.dart';
+export 'src/mysql/table.dart';
packages/orm/angel_migration_runner/lib/src/mysql/runner.dart (new file, 157 lines)
@@ -0,0 +1,157 @@
+import 'dart:async';
+import 'dart:collection';
+import 'package:angel3_migration/angel3_migration.dart';
+import 'package:logging/logging.dart';
+import 'package:mysql1/mysql1.dart';
+import '../runner.dart';
+import '../util.dart';
+import 'schema.dart';
+
+class MysqlMigrationRunner implements MigrationRunner {
+  final _log = Logger('PostgresMigrationRunner');
+
+  final Map<String, Migration> migrations = {};
+  final ConnectionSettings settings;
+  final Queue<Migration> _migrationQueue = Queue();
+  late MySqlConnection connection;
+  bool _connected = false;
+
+  MysqlMigrationRunner(this.settings,
+      {Iterable<Migration> migrations = const [], bool connected = false}) {
+    if (migrations.isNotEmpty == true) migrations.forEach(addMigration);
+    _connected = connected == true;
+  }
+
+  @override
+  void addMigration(Migration migration) {
+    _migrationQueue.addLast(migration);
+  }
+
+  Future _init() async {
+    while (_migrationQueue.isNotEmpty) {
+      var migration = _migrationQueue.removeFirst();
+      var path = await absoluteSourcePath(migration.runtimeType);
+      migrations.putIfAbsent(path.replaceAll('\\', '\\\\'), () => migration);
+    }
+
+    if (!_connected) {
+      connection = await MySqlConnection.connect(settings);
+      _connected = true;
+    }
+
+    await connection.query('''
+    CREATE TABLE IF NOT EXISTS "migrations" (
+      id serial,
+      batch integer,
+      path varchar,
+      PRIMARY KEY(id)
+    );
+    ''').then((result) {
+      _log.info('Check and create "migrations" table');
+    }).catchError((e) {
+      _log.severe('Failed to create "migrations" table.');
+    });
+  }
+
+  @override
+  Future up() async {
+    await _init();
+    var r = await connection.query('SELECT path from migrations;');
+    var existing = r.expand((x) => x).cast<String>();
+    var toRun = <String>[];
+
+    migrations.forEach((k, v) {
+      if (!existing.contains(k)) toRun.add(k);
+    });
+
+    if (toRun.isNotEmpty) {
+      var r = await connection.query('SELECT MAX(batch) from migrations;');
+      var rTmp = r.toList();
+      var curBatch = (rTmp[0][0] ?? 0) as int;
+      var batch = curBatch + 1;
+
+      for (var k in toRun) {
+        var migration = migrations[k]!;
+        var schema = MysqlSchema();
+        migration.up(schema);
+        _log.info('Added "$k" into "migrations" table.');
+        await schema.run(connection).then((_) {
+          return connection.transaction((ctx) async {
+            var result = await ctx.query(
+                "INSERT INTO MIGRATIONS (batch, path) VALUES ($batch, '$k')");
+
+            return result.affectedRowCount;
+          });
+          //return connection.execute(
+          //    'INSERT INTO MIGRATIONS (batch, path) VALUES ($batch, \'$k\');');
+        }).catchError((e) {
+          _log.severe('Failed to insert into "migrations" table.');
+        });
+      }
+    } else {
+      _log.warning('Nothing to add into "migrations" table.');
+    }
+  }
+
+  @override
+  Future rollback() async {
+    await _init();
+
+    var r = await connection.query('SELECT MAX(batch) from migrations;');
+    var rTmp = r.toList();
+    var curBatch = (rTmp[0][0] ?? 0) as int;
+
+    r = await connection
+        .query('SELECT path from migrations WHERE batch = $curBatch;');
+    var existing = r.expand((x) => x).cast<String>();
+    var toRun = <String>[];
+
+    migrations.forEach((k, v) {
+      if (existing.contains(k)) toRun.add(k);
+    });
+
+    if (toRun.isNotEmpty) {
+      for (var k in toRun.reversed) {
+        var migration = migrations[k]!;
+        var schema = MysqlSchema();
+        migration.down(schema);
+        _log.info('Removed "$k" from "migrations" table.');
+        await schema.run(connection).then((_) {
+          return connection
+              .query('DELETE FROM migrations WHERE path = \'$k\';');
+        });
+      }
+    } else {
+      _log.warning('Nothing to remove from "migrations" table.');
+    }
+  }
+
+  @override
+  Future reset() async {
+    await _init();
+    var r = await connection
+        .query('SELECT path from migrations ORDER BY batch DESC;');
+    var existing = r.expand((x) => x).cast<String>();
+    var toRun = existing.where(migrations.containsKey).toList();
+
+    if (toRun.isNotEmpty) {
+      for (var k in toRun.reversed) {
+        var migration = migrations[k]!;
+        var schema = MysqlSchema();
+        migration.down(schema);
+        _log.info('Removed "$k" from "migrations" table.');
+        await schema.run(connection).then((_) {
+          return connection
+              .query('DELETE FROM migrations WHERE path = \'$k\';');
+        });
+      }
+    } else {
+      _log.warning('Nothing to remove from "migrations" table.');
+    }
+  }
+
+  @override
+  Future close() {
+    return connection.close();
+  }
+}
packages/orm/angel_migration_runner/lib/src/mysql/schema.dart (new file, 74 lines)
@@ -0,0 +1,74 @@
+import 'dart:async';
+import 'package:angel3_migration/angel3_migration.dart';
+import 'package:angel3_migration_runner/src/mysql/table.dart';
+import 'package:logging/logging.dart';
+import 'package:mysql1/mysql1.dart';
+
+class MysqlSchema extends Schema {
+  final _log = Logger('MysqlSchema');
+
+  final int _indent;
+  final StringBuffer _buf;
+
+  MysqlSchema._(this._buf, this._indent);
+
+  factory MysqlSchema() => MysqlSchema._(StringBuffer(), 0);
+
+  Future<int> run(MySqlConnection connection) async {
+    //return connection.execute(compile());
+    var result = await connection.transaction((ctx) async {
+      var sql = compile();
+      var result = await ctx.query(sql).catchError((e) {
+        _log.severe('Failed to run query: [ $sql ]', e);
+      });
+      return result.affectedRowCount;
+    });
+
+    return (result is int) ? result : 0;
+  }
+
+  String compile() => _buf.toString();
+
+  void _writeln(String str) {
+    for (var i = 0; i < _indent; i++) {
+      _buf.write('  ');
+    }
+
+    _buf.writeln(str);
+  }
+
+  @override
+  void drop(String tableName, {bool cascade = false}) {
+    var c = cascade == true ? ' CASCADE' : '';
+    _writeln('DROP TABLE "$tableName"$c;');
+  }
+
+  @override
+  void alter(String tableName, void Function(MutableTable table) callback) {
+    var tbl = MysqlAlterTable(tableName);
+    callback(tbl);
+    _writeln('ALTER TABLE "$tableName"');
+    tbl.compile(_buf, _indent + 1);
+    _buf.write(';');
+  }
+
+  void _create(
+      String tableName, void Function(Table table) callback, bool ifNotExists) {
+    var op = ifNotExists ? ' IF NOT EXISTS' : '';
+    var tbl = MysqlTable();
+    callback(tbl);
+    _writeln('CREATE TABLE$op "$tableName" (');
+    tbl.compile(_buf, _indent + 1);
+    _buf.writeln();
+    _writeln(');');
+  }
+
+  @override
+  void create(String tableName, void Function(Table table) callback) =>
+      _create(tableName, callback, false);
+
+  @override
+  void createIfNotExists(
+          String tableName, void Function(Table table) callback) =>
+      _create(tableName, callback, true);
+}
packages/orm/angel_migration_runner/lib/src/mysql/table.dart (new file, 169 lines)
@@ -0,0 +1,169 @@
+import 'dart:collection';
+import 'package:angel3_orm/angel3_orm.dart';
+import 'package:angel3_migration/angel3_migration.dart';
+import 'package:charcode/ascii.dart';
+
+abstract class MysqlGenerator {
+  static String columnType(MigrationColumn column) {
+    var str = column.type.name;
+    if (column.type.hasSize) {
+      return '$str(${column.length})';
+    } else {
+      return str;
+    }
+  }
+
+  static String compileColumn(MigrationColumn column) {
+    var buf = StringBuffer(columnType(column));
+
+    if (column.isNullable == false) buf.write(' NOT NULL');
+    if (column.defaultValue != null) {
+      String s;
+      var value = column.defaultValue;
+      if (value is RawSql) {
+        s = value.value;
+      } else if (value is String) {
+        var b = StringBuffer();
+        for (var ch in value.codeUnits) {
+          if (ch == $single_quote) {
+            b.write("\\'");
+          } else {
+            b.writeCharCode(ch);
+          }
+        }
+        s = b.toString();
+      } else {
+        s = value.toString();
+      }
+
+      buf.write(' DEFAULT $s');
+    }
+
+    if (column.indexType == IndexType.unique) {
+      buf.write(' UNIQUE');
+    } else if (column.indexType == IndexType.primaryKey) {
+      buf.write(' PRIMARY KEY');
+    }
+
+    for (var ref in column.externalReferences) {
+      buf.write(' ' + compileReference(ref));
+    }
+
+    return buf.toString();
+  }
+
+  static String compileReference(MigrationColumnReference ref) {
+    var buf =
+        StringBuffer('REFERENCES "${ref.foreignTable}"("${ref.foreignKey}")');
+    if (ref.behavior != null) buf.write(' ' + ref.behavior!);
+    return buf.toString();
+  }
+}
+
+class MysqlTable extends Table {
+  final Map<String, MigrationColumn> _columns = {};
+
+  @override
+  MigrationColumn declareColumn(String name, Column column) {
+    if (_columns.containsKey(name)) {
+      throw StateError('Cannot redeclare column "$name".');
+    }
+    var col = MigrationColumn.from(column);
+    _columns[name] = col;
+    return col;
+  }
+
+  void compile(StringBuffer buf, int indent) {
+    var i = 0;
+
+    _columns.forEach((name, column) {
+      var col = MysqlGenerator.compileColumn(column);
+      if (i++ > 0) buf.writeln(',');
+
+      for (var i = 0; i < indent; i++) {
+        buf.write('  ');
+      }
+
+      buf.write('"$name" $col');
+    });
+  }
+}
+
+class MysqlAlterTable extends Table implements MutableTable {
+  final Map<String, MigrationColumn> _columns = {};
+  final String tableName;
+  final Queue<String> _stack = Queue<String>();
+
+  MysqlAlterTable(this.tableName);
+
+  void compile(StringBuffer buf, int indent) {
+    var i = 0;
+
+    while (_stack.isNotEmpty) {
+      var str = _stack.removeFirst();
+
+      if (i++ > 0) buf.writeln(',');
+
+      for (var i = 0; i < indent; i++) {
+        buf.write('  ');
+      }
+
+      buf.write(str);
+    }
+
+    if (i > 0) buf.writeln(';');
+
+    i = 0;
+    _columns.forEach((name, column) {
+      var col = MysqlGenerator.compileColumn(column);
+      if (i++ > 0) buf.writeln(',');
+
+      for (var i = 0; i < indent; i++) {
+        buf.write('  ');
+      }
+
+      buf.write('ADD COLUMN "$name" $col');
+    });
+  }
+
+  @override
+  MigrationColumn declareColumn(String name, Column column) {
+    if (_columns.containsKey(name)) {
+      throw StateError('Cannot redeclare column "$name".');
+    }
+    var col = MigrationColumn.from(column);
+    _columns[name] = col;
+    return col;
+  }
+
+  @override
+  void dropNotNull(String name) {
+    _stack.add('ALTER COLUMN "$name" DROP NOT NULL');
+  }
+
+  @override
+  void setNotNull(String name) {
+    _stack.add('ALTER COLUMN "$name" SET NOT NULL');
+  }
+
+  @override
+  void changeColumnType(String name, ColumnType type, {int length = 256}) {
+    _stack.add('ALTER COLUMN "$name" TYPE ' +
+        MysqlGenerator.columnType(MigrationColumn(type, length: length)));
+  }
+
+  @override
+  void renameColumn(String name, String newName) {
+    _stack.add('RENAME COLUMN "$name" TO "$newName"');
+  }
+
+  @override
+  void dropColumn(String name) {
+    _stack.add('DROP COLUMN "$name"');
+  }
+
+  @override
+  void rename(String newName) {
+    _stack.add('RENAME TO "$newName"');
+  }
+}
@@ -76,7 +76,7 @@ class PostgresMigrationRunner implements MigrationRunner {
       await schema.run(connection).then((_) {
         return connection.transaction((ctx) async {
           var result = await ctx.query(
-              "INSERT INTO MIGRATIONS (batch, path) VALUES ($batch, \'$k\')");
+              "INSERT INTO MIGRATIONS (batch, path) VALUES ($batch, '$k')");
 
           return result.affectedRowCount;
         });
@@ -95,7 +95,8 @@ class PostgresMigrationRunner implements MigrationRunner {
   Future rollback() async {
     await _init();
 
-    var r = await connection.query('SELECT MAX(batch) from migrations;');
+    PostgreSQLResult r =
+        await connection.query('SELECT MAX(batch) from migrations;');
     var curBatch = (r[0][0] ?? 0) as int;
     r = await connection
         .query('SELECT path from migrations WHERE batch = $curBatch;');
@@ -9,7 +9,7 @@ abstract class PostgresGenerator {
     if (column.type.hasSize) {
       return '$str(${column.length})';
     } else {
-      return '$str';
+      return str;
     }
   }
@@ -1,5 +1,5 @@
 name: angel3_migration_runner
-version: 4.0.2
+version: 4.1.0
 description: Command-line based database migration runner for Angel3's ORM.
 homepage: https://angel3-framework.web.app/
 repository: https://github.com/dukefirehawk/angel/tree/master/packages/orm/angel_migration_runner
@@ -10,7 +10,8 @@ dependencies:
   angel3_orm: ^4.0.0
   args: ^2.1.0
   charcode: ^1.2.0
-  postgres: ^2.3.2
+  postgres: ^2.4.0
+  mysql1: ^0.19.2
   logging: ^1.0.0
 dev_dependencies:
   lints: ^1.0.0
@@ -1,5 +1,10 @@
 # Change Log
 
+## 4.0.4
+
+* Changed default varchar size to 255
+* Changed default primary key to serial
+
 ## 4.0.3
 
 * Removed debugging messages
@@ -7,4 +7,9 @@
 Runtime support for Angel3 ORM. Includes a clean, database-agnostic query builder and relationship/join support.
 
+## Supported database
+
+* PostgreSQL version 10, 11, 12, 13 and 14
+* MySQL 8.0 or later
+
 For documentation about the ORM, see [Developer Guide](https://angel3-docs.dukefirehawk.com/guides/orm)
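As a point of reference for the query-builder claim above, the tests further down in this diff exercise it roughly like this (a sketch; `BookQuery` and `executor` come from the generated test models, not from this README):

```dart
// Generated query class with a subquery on the `author` relation.
var query = BookQuery()..author.where!.name.like('%Rowling%');
var books = await query.get(executor);
```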
@@ -17,6 +17,9 @@ class Column {
   /// If `true`, a SQL field will be nullable.
   final bool isNullable;
 
+  /// Specifies this column name.
+  final String name;
+
   /// Specifies the length of a `VARCHAR`.
   final int length;
@@ -31,7 +34,8 @@ class Column {
 
   const Column(
       {this.isNullable = true,
-      this.length = 256,
+      this.length = 255,
+      this.name = "",
       this.type = ColumnType.varChar,
       this.indexType = IndexType.none,
       this.expression});
@@ -41,7 +45,7 @@ class Column {
 }
 
 class PrimaryKey extends Column {
-  const PrimaryKey({ColumnType columnType = ColumnType.varChar})
+  const PrimaryKey({ColumnType columnType = ColumnType.serial})
       : super(type: columnType, indexType: IndexType.primaryKey);
 }
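A hedged illustration of the new `Column` defaults and the added `name` field. The annotation style mirrors the `_Author` model further down in this diff; the `_Customer` class and `email` field are made up, and the `name:` parameter is presumably intended to map the Dart getter onto a differently named SQL column.

```dart
import 'package:angel3_migration/angel3_migration.dart';
import 'package:angel3_orm/angel3_orm.dart';
import 'package:angel3_serialize/angel3_serialize.dart';

// Hypothetical model fragment; not part of the commit.
abstract class _Customer extends Model {
  // Stored as VARCHAR(255) by default now; `name:` overrides the column name.
  @Column(name: 'email_address', indexType: IndexType.unique)
  String? get email;
}
```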
@@ -1,5 +1,4 @@
 import 'dart:async';
-import 'package:logging/logging.dart';
 import 'query_executor.dart';
 import 'union.dart';
@@ -7,8 +6,6 @@ import 'package:optional/optional.dart';
 
 /// A base class for objects that compile to SQL queries, typically within an ORM.
 abstract class QueryBase<T> {
-  final _log = Logger('QueryBase');
-
   /// Casts to perform when querying the database.
   Map<String, String> get casts => {};
@@ -12,14 +12,14 @@ class Relationship {
   final String? localKey;
   final String? foreignKey;
   final String? foreignTable;
-  final bool? cascadeOnDelete;
+  final bool cascadeOnDelete;
   final JoinType? joinType;
 
   const Relationship(this.type,
       {this.localKey,
       this.foreignKey,
       this.foreignTable,
-      this.cascadeOnDelete,
+      this.cascadeOnDelete = false,
       this.joinType});
 }
@@ -1,5 +1,5 @@
 name: angel3_orm
-version: 4.0.3
+version: 4.0.4
 description: Runtime support for Angel3 ORM. Includes base classes for queries.
 homepage: https://angel3-framework.web.app/
 repository: https://github.com/dukefirehawk/angel/tree/master/packages/orm/angel_orm
@@ -1,5 +1,10 @@
 # Change Log
 
+## 4.1.3
+
+* Fixed issue #24, incorrect class casting
+* Fixed `@belongsTo` to generate correct table creation query
+
 ## 4.1.2
 
 * Updated linter to `package:lints`
@@ -19,3 +19,8 @@ Run the following command to generate the required `.g.dart` files for Angel3 ORM
 ```bash
 dart run build_runner build
 ```
+
+## Supported database
+
+* PostgreSQL version 10, 11, 12, 13 and 14
+* MySQL 8.0 or later
@@ -73,6 +73,7 @@ class MigrationGenerator extends GeneratorForAnnotation<Orm> {
 
   Method buildUpMigration(OrmBuildContext ctx, LibraryBuilder lib) {
     return Method((meth) {
+      // Check to see if clazz extends Model class
       var autoIdAndDateFields = const TypeChecker.fromRuntime(Model)
           .isAssignableFromType(ctx.buildContext.clazz.thisType);
       meth
@@ -165,7 +166,7 @@ class MigrationGenerator extends GeneratorForAnnotation<Orm> {
       var colType = refer('Column');
       var columnTypeType = refer('ColumnType');
 
-      if (col.length == null) {
+      if (col.length == 0) {
         methodName = 'declare';
         provColumn = columnTypeType.newInstance([
           literal(col.type.name),
@@ -268,10 +269,31 @@ class MigrationGenerator extends GeneratorForAnnotation<Orm> {
 
         // var field = table.property('integer').call([literal(key)]);
         // // .references('user', 'id').onDeleteCascade()
+
+        // Check to see if foreign clazz extends Model class
+        var foreignTableType =
+            relationship.foreign?.buildContext.clazz.thisType;
+        var foreignAautoIdAndDateFields = false;
+        if (foreignTableType != null) {
+          foreignAautoIdAndDateFields =
+              const TypeChecker.fromRuntime(Model)
+                  .isAssignableFromType(foreignTableType);
+        }
+
         var columnTypeType = refer('ColumnType');
         var key = relationship.localKey;
-        var keyType = relationship
-            .foreign!.columns[relationship.foreignKey!]!.type.name;
+        // Default to `int` if foreign class extends Model with implicit 'id'
+        // as primary key
+        String? keyType;
+        if (foreignAautoIdAndDateFields != false &&
+            relationship.foreignKey == "id") {
+          keyType = "int";
+        } else {
+          var foreigColumnName =
+              relationship.foreign?.columns[relationship.foreignKey!];
+          keyType = foreigColumnName?.type.name;
+        }
+
         var field = table.property('declare').call([
           literal(key),
@@ -3,14 +3,12 @@ import 'dart:async';
 import 'package:analyzer/dart/constant/value.dart';
 import 'package:analyzer/dart/element/element.dart';
 import 'package:analyzer/dart/element/type.dart';
-import 'package:angel3_model/angel3_model.dart';
 import 'package:angel3_orm/angel3_orm.dart';
 import 'package:angel3_serialize/angel3_serialize.dart';
 import 'package:angel3_serialize_generator/angel3_serialize_generator.dart';
 import 'package:angel3_serialize_generator/build_context.dart';
 import 'package:angel3_serialize_generator/context.dart';
 import 'package:build/build.dart';
-import 'package:collection/collection.dart' show IterableExtension;
 import 'package:inflection3/inflection3.dart';
 import 'package:recase/recase.dart';
 import 'package:source_gen/source_gen.dart';
@@ -105,11 +103,11 @@ Future<OrmBuildContext?> buildOrmContext(
   // Read all fields
   for (var field in buildCtx.fields) {
     // Check for column annotation...
-    Column? column;
     var element = _findElement(field);
     var columnAnnotation = columnTypeChecker.firstAnnotationOf(element);
     // print('${element.name} => $columnAnnotation');
 
+    Column? column;
     if (columnAnnotation != null) {
       column = reviveColumn(ConstantReader(columnAnnotation));
     }
@@ -281,6 +279,7 @@ Future<OrmBuildContext?> buildOrmContext(
 
     if (relation.type == RelationshipType.belongsTo) {
       var localKey = relation.localKey;
+
       if (localKey != null) {
         var name = ReCase(localKey).camelCase;
         ctx.buildContext.aliases[name] = localKey;
@@ -292,10 +291,14 @@ Future<OrmBuildContext?> buildOrmContext(
 
       if (foreign != null) {
        if (isSpecialId(foreign, foreignField)) {
-          //type = field.type.element.context.typeProvider.intType;
-          type = field.type;
+          // Use integer
+          type = field.type.element?.library?.typeProvider.intType
+              as DartType;
+
+          //type = field.type.element?.context.typeProvider.intType;
         }
       }
 
       var rf = RelationFieldImpl(name, relation, type, field);
       ctx.effectiveFields.add(rf);
     }
@@ -389,7 +392,7 @@ Column reviveColumn(ConstantReader cr) {
 
   return Column(
     isNullable: cr.peek('isNullable')?.boolValue ?? false,
-    length: cr.peek('length')?.intValue ?? 256,
+    length: cr.peek('length')?.intValue ?? 255,
     type: columnType,
     indexType: indexType,
   );
@@ -192,9 +192,7 @@ class OrmGenerator extends GeneratorForAnnotation<Orm> {
         m
           ..name = 'parseRow'
          ..static = true
-          ..returns = TypeReference((b) => b
-            ..symbol = '${rc.pascalCase}'
-            ..isNullable = true) //refer('${rc.pascalCase}?')
+          ..returns = refer('Optional<${rc.pascalCase}>')
           ..requiredParameters.add(Parameter((b) => b
             ..name = 'row'
            ..type = refer('List')))
@@ -233,8 +231,8 @@ class OrmGenerator extends GeneratorForAnnotation<Orm> {
             args[field.name] = expr;
           }
 
-          b.statements
-              .add(Code('if (row.every((x) => x == null)) { return null; }'));
+          b.statements.add(Code(
+              'if (row.every((x) => x == null)) { return Optional.empty(); }'));
 
           b.addExpression(ctx.buildContext.modelClassType
              .newInstance([], args).assignVar('model'));
@@ -270,30 +268,52 @@ class OrmGenerator extends GeneratorForAnnotation<Orm> {
                 .property('parseRow')
                 .call([skipToList]);
 
-            var baseClass = '${foreign.buildContext.originalClassName}';
+            //var baseClass = '${foreign.buildContext.originalClassName}';
+            //var modelClass =
+            //    foreign.buildContext.modelClassNameRecase.pascalCase;
             // Assume: baseclass starts with "_"
             //'_${foreign.buildContext.modelClassNameRecase.pascalCase}';
 
-            if (relation.type == RelationshipType.hasMany) {
-              parsed = literalList([parsed.asA(refer(baseClass))]);
-              var pp = parsed.accept(DartEmitter(useNullSafetySyntax: true));
-              parsed = CodeExpression(Code('$pp'));
-              //Code('$pp.where((x) => x != null).toList()'));
-            }
+            //if (relation.type == RelationshipType.hasMany) {
+            //  parsed = literalList([parsed.asA(refer(modelClass))]);
+            //parsed = literalList([parsed.asA(refer(baseClass))]);
+            // var pp = parsed.accept(DartEmitter(useNullSafetySyntax: true));
+            // parsed = CodeExpression(Code('$pp'));
+            //Code('$pp.where((x) => x != null).toList()'));
+            //}
+
+            //var expr =
+            //    refer('model').property('copyWith').call([], {name: parsed});
+            //var block =
+            //    Block((b) => b.addExpression(refer('model').assign(expr)));
+
+            var stmt = parsed.assignVar('modelOpt');
+            //var e = refer('Optional').property('ifPresent').call([]);
+
+            var val =
+                (relation.type == RelationshipType.hasMany) ? '[m]' : 'm';
+            var code = Code('''
+            modelOpt.ifPresent((m) {
+              model = model.copyWith($name: $val);
+            })
+            ''');
+
+            var block = Block((b) {
+              b.addExpression(stmt);
+              b.addExpression(CodeExpression(code));
+            });
 
-            var expr =
-                refer('model').property('copyWith').call([], {name: parsed});
-            var block =
-                Block((b) => b.addExpression(refer('model').assign(expr)));
             var blockStr =
                 block.accept(DartEmitter(useNullSafetySyntax: true));
 
             var ifStr = 'if (row.length > $i) { $blockStr }';
             b.statements.add(Code(ifStr));
 
             i += foreign.effectiveFields.length;
           });
 
-          b.addExpression(refer('model').returned);
+          b.addExpression(
+              refer('Optional.of').call([refer('model')]).returned);
         });
       }));
@@ -307,9 +327,7 @@ class OrmGenerator extends GeneratorForAnnotation<Orm> {
             ..name = 'row'
            ..type = refer('List')))
           ..body = Block((b) {
-            b.addExpression(refer('Optional.ofNullable').call([
-              refer('parseRow').call([refer('row')])
-            ]).returned);
+            b.addExpression(refer('parseRow').call([refer('row')]).returned);
           });
       }));
@@ -375,18 +393,18 @@ class OrmGenerator extends GeneratorForAnnotation<Orm> {
                 relationForeign.buildContext.resolveFieldName(f.name));
 
             var additionalFields = <Expression>[];
-            additionalStrs.forEach((element) {
+            for (var element in additionalStrs) {
               if (element != null) {
                 additionalFields.add(literalString(element));
               }
-            });
+            }
 
             var joinArgs = <Expression>[];
-            [relation.localKey, relation.foreignKey].forEach((element) {
+            for (var element in [relation.localKey, relation.foreignKey]) {
              if (element != null) {
                 joinArgs.add(literalString(element));
               }
-            });
+            }
 
             // In the case of a many-to-many, we don't generate a subquery field,
             // as it easily leads to stack overflows.
@@ -556,10 +574,9 @@ class OrmGenerator extends GeneratorForAnnotation<Orm> {
               .accept(DartEmitter(useNullSafetySyntax: true))
               .toString()
               .replaceAll('?', '');
-
           merge.add('''
-            $name: $typeLiteral.from(l.$name)..addAll(model.$name)
+            $name: $typeLiteral.from(l.$name ?? [])..addAll(model.$name ?? [])
           ''');
         }
       });
@@ -621,7 +638,6 @@ class OrmGenerator extends GeneratorForAnnotation<Orm> {
     var initializers = <Code>[];
 
     // Add builders for each field
-
     for (var field in ctx.effectiveNormalFields) {
       String? name = field.name;
 
@@ -645,7 +661,7 @@ class OrmGenerator extends GeneratorForAnnotation<Orm> {
           //log.fine('$name type = [$typeName]');
          builderType = TypeReference((b) => b
             ..symbol = 'NumericSqlExpressionBuilder'
-            ..types.add(refer('$typeName')));
+            ..types.add(refer(typeName)));
         } else if (type is InterfaceType && type.element.isEnum) {
           builderType = TypeReference((b) => b
             ..symbol = 'EnumSqlExpressionBuilder'
@@ -670,7 +686,7 @@ class OrmGenerator extends GeneratorForAnnotation<Orm> {
 
         // Detect relationship
       } else if (name.endsWith('Id')) {
-        //log.fine('Relationship detected = $name');
+        log.fine('Foreign Relationship detected = $name');
         var relation = ctx.relations[name.replaceAll('Id', '')];
         if (relation != null) {
           //if (relation?.type != RelationshipType.belongsTo) {
@@ -30,7 +30,7 @@ class RelationshipReader {
   final String? localKey;
   final String? foreignKey;
   final String? foreignTable;
-  final bool? cascadeOnDelete;
+  final bool cascadeOnDelete;
   final DartType? through;
   final OrmBuildContext? foreign;
   final OrmBuildContext? throughContext;
@@ -40,7 +40,7 @@ class RelationshipReader {
       {this.localKey,
       this.foreignKey,
       this.foreignTable,
-      this.cascadeOnDelete,
+      this.cascadeOnDelete = false,
       this.through,
       this.foreign,
       this.throughContext,
@@ -75,13 +75,13 @@ class RelationshipReader {
     });
   }
 
-  FieldElement findForeignField(OrmBuildContext? ctx) {
+  FieldElement findForeignField(OrmBuildContext ctx) {
     var foreign = throughContext ?? this.foreign!;
     return foreign.effectiveFields.firstWhere(
         (f) => foreign.buildContext.resolveFieldName(f.name) == foreignKey,
         orElse: () {
       throw '${foreign.buildContext.clazz.name} has no field that maps to the name "$foreignKey", '
-          'but ${ctx!.buildContext.clazz.name} has a @HasMany() relation that expects such a field.';
+          'but ${ctx.buildContext.clazz.name} has a @HasMany() relation that expects such a field.';
     });
   }
 }
@@ -1,5 +1,5 @@
 name: angel3_orm_generator
-version: 4.1.2
+version: 4.1.3
 description: Code generators for Angel3 ORM. Generates query builder classes.
 homepage: https://angel3-framework.web.app/
 repository: https://github.com/dukefirehawk/angel/tree/master/packages/orm/angel_orm_generator
@@ -30,4 +30,9 @@ dev_dependencies:
   postgres: ^2.4.0
   test: ^1.17.3
   lints: ^1.0.0
+dependency_overrides:
+  angel3_orm:
+    path: ../angel_orm
+  angel3_migration:
+    path: ../angel_migration
@@ -7,4 +7,11 @@
 Mysql support for Angel3 ORM.
 
+## Supported database
+
+* MySQL 8.0 or later
+* MariaDB 10.2.1 or later
+
+**Note:** ORM for MySQL requires support for the `WITH` SQL syntax
+
 For documentation about the ORM, head to the main project repo: <https://github.com/dukefirehawk/angel/tree/master/packages/orm>
@@ -5,6 +5,10 @@
 [![Gitter](https://img.shields.io/gitter/room/angel_dart/discussion)](https://gitter.im/angel_dart/discussion)
 [![License](https://img.shields.io/github/license/dukefirehawk/angel)](https://github.com/dukefirehawk/angel/tree/master/packages/orm/angel_orm_postgres/LICENSE)
 
-PostgreSQL support for Angel3 ORM. Supported version: 10, 11, 12, 13 and 14
+PostgreSQL support for Angel3 ORM.
+
+## Supported PostgreSQL
+
+* version 10 or later
 
 For documentation about the ORM, see [Developer Guide](https://angel3-docs.dukefirehawk.com/guides/orm)
@@ -16,8 +16,8 @@ dev_dependencies:
   angel3_orm_test: ^3.0.0
   test: ^1.17.5
   lints: ^1.0.0
-#dependency_overrides:
-#  angel3_orm_test:
-#    path: ../angel_orm_test
-#  angel3_orm:
-#    path: ../angel_orm
+dependency_overrides:
+  angel3_orm_test:
+    path: ../angel_orm_test
+  angel3_orm:
+    path: ../angel_orm
@@ -26,8 +26,8 @@ void belongsToTests(FutureOr<QueryExecutor> Function() createExecutor,
     // And a book
     var bookQuery = BookQuery();
     bookQuery.values
-      ..authorId = int.parse(jkRowling!.id!)
-      ..partnerAuthorId = int.parse(jameson!.id!)
+      ..authorId = jkRowling?.idAsInt ?? 0
+      ..partnerAuthorId = jameson?.idAsInt ?? 0
       ..name = 'Deathly Hallows';
 
     deathlyHallows = (await bookQuery.insert(executor)).value;
@@ -162,13 +162,13 @@ void belongsToTests(FutureOr<QueryExecutor> Function() createExecutor,
   // that should return correctly.
   test('returns empty on false subquery', () async {
     printSeparator('False subquery test');
-    var query = BookQuery()..author!.where!.name.equals('Billie Jean');
+    var query = BookQuery()..author.where!.name.equals('Billie Jean');
     expect(await query.get(executor), isEmpty);
   });
 
   test('returns values on true subquery', () async {
     printSeparator('True subquery test');
-    var query = BookQuery()..author!.where!.name.like('%Rowling%');
+    var query = BookQuery()..author.where!.name.like('%Rowling%');
     expect(await query.get(executor), [deathlyHallows]);
   });
 });
@@ -31,8 +31,8 @@ void hasManyTests(FutureOr<QueryExecutor> Function() createExecutor,
   void verify(Tree tree) {
     //print(tree.fruits!.map(FruitSerializer.toMap).toList());
     expect(tree.fruits, hasLength(2));
-    expect(tree.fruits![0].commonName, apple!.commonName);
-    expect(tree.fruits![1].commonName, banana!.commonName);
+    expect(tree.fruits[0].commonName, apple!.commonName);
+    expect(tree.fruits[1].commonName, banana!.commonName);
   }
 
   setUp(() async {
@@ -86,7 +86,7 @@ void hasManyTests(FutureOr<QueryExecutor> Function() createExecutor,
   test('returns empty on false subquery', () async {
     var tq = TreeQuery()
       ..where!.id.equals(treeId)
-      ..fruits!.where!.commonName.equals('Kiwi');
+      ..fruits.where!.commonName.equals('Kiwi');
     var treeOpt = await (tq.getOne(executor));
     expect(treeOpt.isPresent, true);
     treeOpt.ifPresent((tree) {
@@ -115,7 +115,7 @@ void hasOneTests(FutureOr<QueryExecutor> Function() createExecutor,
   test('sets null on false subquery', () async {
     var legQuery = LegQuery()
       ..where!.id.equals(originalLeg!.idAsInt)
-      ..foot!.where!.legId.equals(originalLeg!.idAsInt + 1024);
+      ..foot.where!.legId.equals(originalLeg!.idAsInt + 1024);
     var legOpt = await (legQuery.getOne(executor));
     expect(legOpt.isPresent, true);
     legOpt.ifPresent((leg) {
@@ -1,7 +1,6 @@
 library angel_orm3.generator.models.book;
 
 import 'package:angel3_migration/angel3_migration.dart';
-import 'package:angel3_model/angel3_model.dart';
 import 'package:angel3_orm/angel3_orm.dart';
 import 'package:angel3_serialize/angel3_serialize.dart';
 import 'package:optional/optional.dart';
@@ -26,4 +25,7 @@ abstract class _Author extends Model {
   @Column(length: 255, indexType: IndexType.unique)
   @SerializableField(defaultValue: 'Tobe Osakwe')
   String? get name;
+
+  //@Column(name: "pub")
+  //String? publisher;
 }
@@ -13,12 +13,10 @@ class BookMigration extends Migration {
       table.serial('id').primaryKey();
       table.timeStamp('created_at');
       table.timeStamp('updated_at');
-      table.varChar('name');
+      table.varChar('name', length: 255);
+      table.declare('author_id', ColumnType('int')).references('authors', 'id');
       table
-          .declare('author_id', ColumnType('serial'))
-          .references('authors', 'id');
-      table
-          .declare('partner_author_id', ColumnType('serial'))
+          .declare('partner_author_id', ColumnType('int'))
           .references('authors', 'id');
     });
   }
@@ -36,7 +34,9 @@ class AuthorMigration extends Migration {
       table.serial('id').primaryKey();
      table.timeStamp('created_at');
       table.timeStamp('updated_at');
-      table.varChar('name', length: 255).defaultsTo('Tobe Osakwe');
+      table.varChar('name', length: 255)
+        ..defaultsTo('Tobe Osakwe')
+        ..unique();
     });
   }
|
||||||
|
@ -70,9 +70,9 @@ class BookQuery extends Query<Book, BookQueryWhere> {
|
||||||
|
|
||||||
BookQueryWhere? _where;
|
BookQueryWhere? _where;
|
||||||
|
|
||||||
AuthorQuery? _author;
|
late AuthorQuery _author;
|
||||||
|
|
||||||
AuthorQuery? _partnerAuthor;
|
late AuthorQuery _partnerAuthor;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map<String, String> get casts {
|
Map<String, String> get casts {
|
||||||
|
@ -135,11 +135,11 @@ class BookQuery extends Query<Book, BookQueryWhere> {
|
||||||
return parseRow(row);
|
return parseRow(row);
|
||||||
}
|
}
|
||||||
|
|
||||||
AuthorQuery? get author {
|
AuthorQuery get author {
|
||||||
return _author;
|
return _author;
|
||||||
}
|
}
|
||||||
|
|
||||||
AuthorQuery? get partnerAuthor {
|
AuthorQuery get partnerAuthor {
|
||||||
return _partnerAuthor;
|
return _partnerAuthor;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -193,16 +193,16 @@ class BookQueryValues extends MapQueryValues {
|
||||||
}
|
}
|
||||||
|
|
||||||
set updatedAt(DateTime? value) => values['updated_at'] = value;
|
set updatedAt(DateTime? value) => values['updated_at'] = value;
|
||||||
int? get authorId {
|
int get authorId {
|
||||||
return (values['author_id'] as int?);
|
return (values['author_id'] as int);
|
||||||
}
|
}
|
||||||
|
|
||||||
set authorId(int? value) => values['author_id'] = value;
|
set authorId(int value) => values['author_id'] = value;
|
||||||
int? get partnerAuthorId {
|
int get partnerAuthorId {
|
||||||
return (values['partner_author_id'] as int?);
|
return (values['partner_author_id'] as int);
|
||||||
}
|
}
|
||||||
|
|
||||||
set partnerAuthorId(int? value) => values['partner_author_id'] = value;
|
set partnerAuthorId(int value) => values['partner_author_id'] = value;
|
||||||
String? get name {
|
String? get name {
|
||||||
return (values['name'] as String?);
|
return (values['name'] as String?);
|
||||||
}
|
}
|
||||||
|
@ -213,10 +213,10 @@ class BookQueryValues extends MapQueryValues {
|
||||||
updatedAt = model.updatedAt;
|
updatedAt = model.updatedAt;
|
||||||
name = model.name;
|
name = model.name;
|
||||||
if (model.author != null) {
|
if (model.author != null) {
|
||||||
values['author_id'] = model.author!.id;
|
values['author_id'] = model.author?.id;
|
||||||
}
|
}
|
||||||
if (model.partnerAuthor != null) {
|
if (model.partnerAuthor != null) {
|
||||||
values['partner_author_id'] = model.partnerAuthor!.id;
|
values['partner_author_id'] = model.partnerAuthor?.id;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -267,8 +267,8 @@ class AuthorQuery extends Query<Author, AuthorQueryWhere> {
|
||||||
id: row[0].toString(),
|
id: row[0].toString(),
|
||||||
createdAt: (row[1] as DateTime?),
|
createdAt: (row[1] as DateTime?),
|
||||||
updatedAt: (row[2] as DateTime?),
|
updatedAt: (row[2] as DateTime?),
|
||||||
name: (row[3]) as String?);
|
name: (row[3] as String?));
|
||||||
return Optional.ofNullable(model);
|
return Optional.of(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -425,7 +425,7 @@ class Author extends _Author {
|
||||||
DateTime? updatedAt;
|
DateTime? updatedAt;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final String? name;
|
String? name;
|
||||||
|
|
||||||
Author copyWith(
|
Author copyWith(
|
||||||
{String? id, DateTime? createdAt, DateTime? updatedAt, String? name}) {
|
{String? id, DateTime? createdAt, DateTime? updatedAt, String? name}) {
|
||||||
|
@ -455,7 +455,7 @@ class Author extends _Author {
|
||||||
return 'Author(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, name=$name)';
|
return 'Author(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, name=$name)';
|
||||||
}
|
}
|
||||||
|
|
||||||
Map<String, dynamic>? toJson() {
|
Map<String, dynamic> toJson() {
|
||||||
return AuthorSerializer.toMap(this);
|
return AuthorSerializer.toMap(this);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -492,12 +492,12 @@ class BookSerializer extends Codec<Book, Map> {
|
||||||
id: map['id'] as String?,
|
id: map['id'] as String?,
|
||||||
createdAt: map['created_at'] != null
|
createdAt: map['created_at'] != null
|
||||||
? (map['created_at'] is DateTime
|
? (map['created_at'] is DateTime
|
||||||
? (map['created_at'] as DateTime?)
|
? (map['created_at'] as DateTime)
|
||||||
: DateTime.parse(map['created_at'].toString()))
|
: DateTime.parse(map['created_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
updatedAt: map['updated_at'] != null
|
updatedAt: map['updated_at'] != null
|
||||||
? (map['updated_at'] is DateTime
|
? (map['updated_at'] is DateTime
|
||||||
? (map['updated_at'] as DateTime?)
|
? (map['updated_at'] as DateTime)
|
||||||
: DateTime.parse(map['updated_at'].toString()))
|
: DateTime.parse(map['updated_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
author: map['author'] != null
|
author: map['author'] != null
|
||||||
|
@ -509,7 +509,10 @@ class BookSerializer extends Codec<Book, Map> {
|
||||||
name: map['name'] as String?);
|
name: map['name'] as String?);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic> toMap(_Book model) {
|
static Map<String, dynamic> toMap(_Book? model) {
|
||||||
|
if (model == null) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
'created_at': model.createdAt?.toIso8601String(),
|
'created_at': model.createdAt?.toIso8601String(),
|
||||||
|
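The serializers' `toMap` now accepts a nullable model and returns an empty map instead of null, which is what allows the codec signatures to drop `Map?`. A reduced sketch of the shape (field names are illustrative):

```dart
class Thing {
  Thing({this.id, this.createdAt});
  final String? id;
  final DateTime? createdAt;
}

class ThingSerializer {
  const ThingSerializer();

  // A null model now serializes to an empty map instead of null, so the
  // codec can be typed Codec<Thing, Map> rather than Codec<Thing, Map?>.
  static Map<String, dynamic> toMap(Thing? model) {
    if (model == null) {
      return {};
    }
    return {
      'id': model.id,
      'created_at': model.createdAt?.toIso8601String(),
    };
  }
}
```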
@ -546,11 +549,11 @@ abstract class BookFields {
|
||||||
|
|
||||||
const AuthorSerializer authorSerializer = AuthorSerializer();
|
const AuthorSerializer authorSerializer = AuthorSerializer();
|
||||||
|
|
||||||
class AuthorEncoder extends Converter<Author, Map?> {
|
class AuthorEncoder extends Converter<Author, Map> {
|
||||||
const AuthorEncoder();
|
const AuthorEncoder();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map? convert(Author model) => AuthorSerializer.toMap(model);
|
Map convert(Author model) => AuthorSerializer.toMap(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
class AuthorDecoder extends Converter<Map, Author> {
|
class AuthorDecoder extends Converter<Map, Author> {
|
||||||
|
@ -560,7 +563,7 @@ class AuthorDecoder extends Converter<Map, Author> {
|
||||||
Author convert(Map map) => AuthorSerializer.fromMap(map);
|
Author convert(Map map) => AuthorSerializer.fromMap(map);
|
||||||
}
|
}
|
||||||
|
|
||||||
class AuthorSerializer extends Codec<Author, Map?> {
|
class AuthorSerializer extends Codec<Author, Map> {
|
||||||
const AuthorSerializer();
|
const AuthorSerializer();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -572,20 +575,20 @@ class AuthorSerializer extends Codec<Author, Map?> {
|
||||||
id: map['id'] as String?,
|
id: map['id'] as String?,
|
||||||
createdAt: map['created_at'] != null
|
createdAt: map['created_at'] != null
|
||||||
? (map['created_at'] is DateTime
|
? (map['created_at'] is DateTime
|
||||||
? (map['created_at'] as DateTime?)
|
? (map['created_at'] as DateTime)
|
||||||
: DateTime.parse(map['created_at'].toString()))
|
: DateTime.parse(map['created_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
updatedAt: map['updated_at'] != null
|
updatedAt: map['updated_at'] != null
|
||||||
? (map['updated_at'] is DateTime
|
? (map['updated_at'] is DateTime
|
||||||
? (map['updated_at'] as DateTime?)
|
? (map['updated_at'] as DateTime)
|
||||||
: DateTime.parse(map['updated_at'].toString()))
|
: DateTime.parse(map['updated_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
name: map['name'] as String? ?? 'Tobe Osakwe');
|
name: map['name'] as String? ?? 'Tobe Osakwe');
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic>? toMap(_Author? model) {
|
static Map<String, dynamic> toMap(_Author? model) {
|
||||||
if (model == null) {
|
if (model == null) {
|
||||||
return null;
|
return {};
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
|
|
|
@ -1,7 +1,6 @@
|
||||||
library angel_orm3.generator.models.car;
|
library angel_orm3.generator.models.car;
|
||||||
|
|
||||||
import 'package:angel3_migration/angel3_migration.dart';
|
import 'package:angel3_migration/angel3_migration.dart';
|
||||||
import 'package:angel3_model/angel3_model.dart';
|
|
||||||
import 'package:angel3_orm/angel3_orm.dart';
|
import 'package:angel3_orm/angel3_orm.dart';
|
||||||
import 'package:angel3_serialize/angel3_serialize.dart';
|
import 'package:angel3_serialize/angel3_serialize.dart';
|
||||||
import 'package:optional/optional.dart';
|
import 'package:optional/optional.dart';
|
||||||
|
|
|
@ -13,8 +13,8 @@ class CarMigration extends Migration {
|
||||||
table.serial('id').primaryKey();
|
table.serial('id').primaryKey();
|
||||||
table.timeStamp('created_at');
|
table.timeStamp('created_at');
|
||||||
table.timeStamp('updated_at');
|
table.timeStamp('updated_at');
|
||||||
table.varChar('make');
|
table.varChar('make', length: 255);
|
||||||
table.varChar('description');
|
table.varChar('description', length: 255);
|
||||||
table.boolean('family_friendly');
|
table.boolean('family_friendly');
|
||||||
table.timeStamp('recalled_at');
|
table.timeStamp('recalled_at');
|
||||||
});
|
});
|
||||||
|
@ -309,12 +309,12 @@ class CarSerializer extends Codec<Car, Map> {
|
||||||
id: map['id'] as String?,
|
id: map['id'] as String?,
|
||||||
createdAt: map['created_at'] != null
|
createdAt: map['created_at'] != null
|
||||||
? (map['created_at'] is DateTime
|
? (map['created_at'] is DateTime
|
||||||
? (map['created_at'] as DateTime?)
|
? (map['created_at'] as DateTime)
|
||||||
: DateTime.parse(map['created_at'].toString()))
|
: DateTime.parse(map['created_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
updatedAt: map['updated_at'] != null
|
updatedAt: map['updated_at'] != null
|
||||||
? (map['updated_at'] is DateTime
|
? (map['updated_at'] is DateTime
|
||||||
? (map['updated_at'] as DateTime?)
|
? (map['updated_at'] as DateTime)
|
||||||
: DateTime.parse(map['updated_at'].toString()))
|
: DateTime.parse(map['updated_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
make: map['make'] as String?,
|
make: map['make'] as String?,
|
||||||
|
@ -322,12 +322,15 @@ class CarSerializer extends Codec<Car, Map> {
|
||||||
familyFriendly: map['family_friendly'] as bool?,
|
familyFriendly: map['family_friendly'] as bool?,
|
||||||
recalledAt: map['recalled_at'] != null
|
recalledAt: map['recalled_at'] != null
|
||||||
? (map['recalled_at'] is DateTime
|
? (map['recalled_at'] is DateTime
|
||||||
? (map['recalled_at'] as DateTime?)
|
? (map['recalled_at'] as DateTime)
|
||||||
: DateTime.parse(map['recalled_at'].toString()))
|
: DateTime.parse(map['recalled_at'].toString()))
|
||||||
: null);
|
: null);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic> toMap(_Car model) {
|
static Map<String, dynamic> toMap(_Car? model) {
|
||||||
|
if (model == null) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
'created_at': model.createdAt?.toIso8601String(),
|
'created_at': model.createdAt?.toIso8601String(),
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
import 'package:angel3_migration/angel3_migration.dart';
|
import 'package:angel3_migration/angel3_migration.dart';
|
||||||
import 'package:angel3_model/angel3_model.dart';
|
|
||||||
import 'package:angel3_orm/angel3_orm.dart';
|
import 'package:angel3_orm/angel3_orm.dart';
|
||||||
import 'package:angel3_serialize/angel3_serialize.dart';
|
import 'package:angel3_serialize/angel3_serialize.dart';
|
||||||
import 'package:optional/optional.dart';
|
import 'package:optional/optional.dart';
|
||||||
|
|
|
@ -29,9 +29,9 @@ class AlphabetMigration extends Migration {
|
||||||
table.serial('id').primaryKey();
|
table.serial('id').primaryKey();
|
||||||
table.timeStamp('created_at');
|
table.timeStamp('created_at');
|
||||||
table.timeStamp('updated_at');
|
table.timeStamp('updated_at');
|
||||||
table.varChar('value');
|
table.varChar('value', length: 255);
|
||||||
table
|
table
|
||||||
.declare('numbers_id', ColumnType('serial'))
|
.declare('numbers_id', ColumnType('int'))
|
||||||
.references('numbers', 'id');
|
.references('numbers', 'id');
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
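The foreign-key column is now declared as a plain `int`, where a `serial` would have tried to auto-increment the reference, and `.references` is chained to point at the parent table. Sketch under the same `declare`/`references` API shown above:

```dart
import 'package:angel3_migration/angel3_migration.dart';
import 'package:angel3_orm/angel3_orm.dart';

class ChildMigration extends Migration {
  @override
  void up(Schema schema) {
    schema.create('children', (table) {
      table.serial('id').primaryKey();
      // A foreign key is a plain int column referencing the parent's id;
      // declaring it as 'serial' would make it auto-increment instead.
      table
          .declare('parent_id', ColumnType('int'))
          .references('parents', 'id');
    });
  }

  @override
  void down(Schema schema) {
    schema.drop('children');
  }
}
```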
@ -165,7 +165,7 @@ class AlphabetQuery extends Query<Alphabet, AlphabetQueryWhere> {
|
||||||
|
|
||||||
AlphabetQueryWhere? _where;
|
AlphabetQueryWhere? _where;
|
||||||
|
|
||||||
NumbersQuery? _numbers;
|
late NumbersQuery _numbers;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map<String, String> get casts {
|
Map<String, String> get casts {
|
||||||
|
@ -215,7 +215,7 @@ class AlphabetQuery extends Query<Alphabet, AlphabetQueryWhere> {
|
||||||
return parseRow(row);
|
return parseRow(row);
|
||||||
}
|
}
|
||||||
|
|
||||||
NumbersQuery? get numbers {
|
NumbersQuery get numbers {
|
||||||
return _numbers;
|
return _numbers;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -270,17 +270,17 @@ class AlphabetQueryValues extends MapQueryValues {
|
||||||
}
|
}
|
||||||
|
|
||||||
set value(String? value) => values['value'] = value;
|
set value(String? value) => values['value'] = value;
|
||||||
int? get numbersId {
|
int get numbersId {
|
||||||
return (values['numbers_id'] as int?);
|
return (values['numbers_id'] as int);
|
||||||
}
|
}
|
||||||
|
|
||||||
set numbersId(int? value) => values['numbers_id'] = value;
|
set numbersId(int value) => values['numbers_id'] = value;
|
||||||
void copyFrom(Alphabet model) {
|
void copyFrom(Alphabet model) {
|
||||||
createdAt = model.createdAt;
|
createdAt = model.createdAt;
|
||||||
updatedAt = model.updatedAt;
|
updatedAt = model.updatedAt;
|
||||||
value = model.value;
|
value = model.value;
|
||||||
if (model.numbers != null) {
|
if (model.numbers != null) {
|
||||||
values['numbers_id'] = model.numbers!.id;
|
values['numbers_id'] = model.numbers?.id;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -336,7 +336,7 @@ class Numbers extends _Numbers {
|
||||||
return 'Numbers(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, two=$two)';
|
return 'Numbers(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, two=$two)';
|
||||||
}
|
}
|
||||||
|
|
||||||
Map<String, dynamic>? toJson() {
|
Map<String, dynamic> toJson() {
|
||||||
return NumbersSerializer.toMap(this);
|
return NumbersSerializer.toMap(this);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -408,11 +408,11 @@ class Alphabet extends _Alphabet {
|
||||||
|
|
||||||
const NumbersSerializer numbersSerializer = NumbersSerializer();
|
const NumbersSerializer numbersSerializer = NumbersSerializer();
|
||||||
|
|
||||||
class NumbersEncoder extends Converter<Numbers, Map?> {
|
class NumbersEncoder extends Converter<Numbers, Map> {
|
||||||
const NumbersEncoder();
|
const NumbersEncoder();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map? convert(Numbers model) => NumbersSerializer.toMap(model);
|
Map convert(Numbers model) => NumbersSerializer.toMap(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
class NumbersDecoder extends Converter<Map, Numbers> {
|
class NumbersDecoder extends Converter<Map, Numbers> {
|
||||||
|
@ -422,7 +422,7 @@ class NumbersDecoder extends Converter<Map, Numbers> {
|
||||||
Numbers convert(Map map) => NumbersSerializer.fromMap(map);
|
Numbers convert(Map map) => NumbersSerializer.fromMap(map);
|
||||||
}
|
}
|
||||||
|
|
||||||
class NumbersSerializer extends Codec<Numbers, Map?> {
|
class NumbersSerializer extends Codec<Numbers, Map> {
|
||||||
const NumbersSerializer();
|
const NumbersSerializer();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -434,20 +434,20 @@ class NumbersSerializer extends Codec<Numbers, Map?> {
|
||||||
id: map['id'] as String?,
|
id: map['id'] as String?,
|
||||||
createdAt: map['created_at'] != null
|
createdAt: map['created_at'] != null
|
||||||
? (map['created_at'] is DateTime
|
? (map['created_at'] is DateTime
|
||||||
? (map['created_at'] as DateTime?)
|
? (map['created_at'] as DateTime)
|
||||||
: DateTime.parse(map['created_at'].toString()))
|
: DateTime.parse(map['created_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
updatedAt: map['updated_at'] != null
|
updatedAt: map['updated_at'] != null
|
||||||
? (map['updated_at'] is DateTime
|
? (map['updated_at'] is DateTime
|
||||||
? (map['updated_at'] as DateTime?)
|
? (map['updated_at'] as DateTime)
|
||||||
: DateTime.parse(map['updated_at'].toString()))
|
: DateTime.parse(map['updated_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
two: map['two'] as int?);
|
two: map['two'] as int?);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic>? toMap(_Numbers? model) {
|
static Map<String, dynamic> toMap(_Numbers? model) {
|
||||||
if (model == null) {
|
if (model == null) {
|
||||||
return null;
|
return {};
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
|
@ -498,12 +498,12 @@ class AlphabetSerializer extends Codec<Alphabet, Map> {
|
||||||
id: map['id'] as String?,
|
id: map['id'] as String?,
|
||||||
createdAt: map['created_at'] != null
|
createdAt: map['created_at'] != null
|
||||||
? (map['created_at'] is DateTime
|
? (map['created_at'] is DateTime
|
||||||
? (map['created_at'] as DateTime?)
|
? (map['created_at'] as DateTime)
|
||||||
: DateTime.parse(map['created_at'].toString()))
|
: DateTime.parse(map['created_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
updatedAt: map['updated_at'] != null
|
updatedAt: map['updated_at'] != null
|
||||||
? (map['updated_at'] is DateTime
|
? (map['updated_at'] is DateTime
|
||||||
? (map['updated_at'] as DateTime?)
|
? (map['updated_at'] as DateTime)
|
||||||
: DateTime.parse(map['updated_at'].toString()))
|
: DateTime.parse(map['updated_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
value: map['value'] as String?,
|
value: map['value'] as String?,
|
||||||
|
@ -512,7 +512,10 @@ class AlphabetSerializer extends Codec<Alphabet, Map> {
|
||||||
: null);
|
: null);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic> toMap(_Alphabet model) {
|
static Map<String, dynamic> toMap(_Alphabet? model) {
|
||||||
|
if (model == null) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
'created_at': model.createdAt?.toIso8601String(),
|
'created_at': model.createdAt?.toIso8601String(),
|
||||||
|
|
|
@ -10,7 +10,7 @@ class RoleMigration extends Migration {
|
||||||
@override
|
@override
|
||||||
void up(Schema schema) {
|
void up(Schema schema) {
|
||||||
schema.create('roles', (table) {
|
schema.create('roles', (table) {
|
||||||
table.declare('role', ColumnType('varchar')).primaryKey();
|
table.varChar('role', length: 255).primaryKey();
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -43,9 +43,9 @@ class UserMigration extends Migration {
|
||||||
@override
|
@override
|
||||||
void up(Schema schema) {
|
void up(Schema schema) {
|
||||||
schema.create('users', (table) {
|
schema.create('users', (table) {
|
||||||
table.varChar('email').primaryKey();
|
table.varChar('email', length: 255).primaryKey();
|
||||||
table.varChar('name');
|
table.varChar('name', length: 255);
|
||||||
table.varChar('password');
|
table.varChar('password', length: 255);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -75,7 +75,7 @@ class RoleQuery extends Query<Role, RoleQueryWhere> {
|
||||||
@override
|
@override
|
||||||
final RoleQueryValues values = RoleQueryValues();
|
final RoleQueryValues values = RoleQueryValues();
|
||||||
|
|
||||||
late RoleQueryWhere _where;
|
RoleQueryWhere? _where;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map<String, String> get casts {
|
Map<String, String> get casts {
|
||||||
|
@ -108,12 +108,12 @@ class RoleQuery extends Query<Role, RoleQueryWhere> {
|
||||||
}
|
}
|
||||||
var model = Role(role: (row[0] as String?));
|
var model = Role(role: (row[0] as String?));
|
||||||
if (row.length > 1) {
|
if (row.length > 1) {
|
||||||
var rowDataOpt = UserQuery.parseRow(row.skip(1).take(3).toList());
|
var modelOpt = UserQuery.parseRow(row.skip(1).take(3).toList());
|
||||||
rowDataOpt.ifPresent((m) {
|
modelOpt.ifPresent((m) {
|
||||||
model = model.copyWith(users: [m]);
|
model = model.copyWith(users: [m]);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
return Optional.ofNullable(model);
|
return Optional.of(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -123,8 +123,8 @@ class RoleQuery extends Query<Role, RoleQueryWhere> {
|
||||||
|
|
||||||
@override
|
@override
|
||||||
bool canCompile(trampoline) {
|
bool canCompile(trampoline) {
|
||||||
return (!(trampoline.contains('roles') == true &&
|
return (!(trampoline.contains('roles') &&
|
||||||
trampoline.contains('role_users') == true));
|
trampoline.contains('role_users')));
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -139,7 +139,8 @@ class RoleQuery extends Query<Role, RoleQueryWhere> {
|
||||||
var l = out[idx];
|
var l = out[idx];
|
||||||
return out
|
return out
|
||||||
..[idx] = l.copyWith(
|
..[idx] = l.copyWith(
|
||||||
users: List<_User>.from(l.users)..addAll(model.users));
|
users: List<_User>.from(l.users ?? [])
|
||||||
|
..addAll(model.users ?? []));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
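When joined rows are folded back into one parent, the child list is now merged with null-aware fallbacks so a null list on either side no longer throws. The merge in isolation (types simplified):

```dart
// Null-aware merge used when folding joined rows into one parent:
// a null list on either side is treated as empty.
List<T> mergeChildren<T>(List<T>? existing, List<T>? parsed) {
  return List<T>.from(existing ?? [])..addAll(parsed ?? []);
}

void main() {
  print(mergeChildren<String>(null, ['a'])); // [a]
  print(mergeChildren<String>(['a'], ['b'])); // [a, b]
}
```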
@ -157,7 +158,8 @@ class RoleQuery extends Query<Role, RoleQueryWhere> {
|
||||||
var l = out[idx];
|
var l = out[idx];
|
||||||
return out
|
return out
|
||||||
..[idx] = l.copyWith(
|
..[idx] = l.copyWith(
|
||||||
users: List<_User>.from(l.users)..addAll(model.users));
|
users: List<_User>.from(l.users ?? [])
|
||||||
|
..addAll(model.users ?? []));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -175,7 +177,8 @@ class RoleQuery extends Query<Role, RoleQueryWhere> {
|
||||||
var l = out[idx];
|
var l = out[idx];
|
||||||
return out
|
return out
|
||||||
..[idx] = l.copyWith(
|
..[idx] = l.copyWith(
|
||||||
users: List<_User>.from(l.users)..addAll(model.users));
|
users: List<_User>.from(l.users ?? [])
|
||||||
|
..addAll(model.users ?? []));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -230,9 +233,9 @@ class RoleUserQuery extends Query<RoleUser, RoleUserQueryWhere> {
|
||||||
|
|
||||||
RoleUserQueryWhere? _where;
|
RoleUserQueryWhere? _where;
|
||||||
|
|
||||||
RoleQuery? _role;
|
late RoleQuery _role;
|
||||||
|
|
||||||
UserQuery? _user;
|
late UserQuery _user;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map<String, String> get casts {
|
Map<String, String> get casts {
|
||||||
|
@ -276,7 +279,7 @@ class RoleUserQuery extends Query<RoleUser, RoleUserQueryWhere> {
|
||||||
model = model.copyWith(user: m);
|
model = model.copyWith(user: m);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
return Optional.ofNullable(model);
|
return Optional.of(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -284,11 +287,11 @@ class RoleUserQuery extends Query<RoleUser, RoleUserQueryWhere> {
|
||||||
return parseRow(row);
|
return parseRow(row);
|
||||||
}
|
}
|
||||||
|
|
||||||
RoleQuery? get role {
|
RoleQuery get role {
|
||||||
return _role;
|
return _role;
|
||||||
}
|
}
|
||||||
|
|
||||||
UserQuery? get user {
|
UserQuery get user {
|
||||||
return _user;
|
return _user;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -326,10 +329,10 @@ class RoleUserQueryValues extends MapQueryValues {
|
||||||
set userEmail(String? value) => values['user_email'] = value;
|
set userEmail(String? value) => values['user_email'] = value;
|
||||||
void copyFrom(RoleUser model) {
|
void copyFrom(RoleUser model) {
|
||||||
if (model.role != null) {
|
if (model.role != null) {
|
||||||
values['role_role'] = model.role!.role;
|
values['role_role'] = model.role?.role;
|
||||||
}
|
}
|
||||||
if (model.user != null) {
|
if (model.user != null) {
|
||||||
values['user_email'] = model.user!.email;
|
values['user_email'] = model.user?.email;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -390,10 +393,6 @@ class UserQuery extends Query<User, UserQueryWhere> {
|
||||||
modelOpt.ifPresent((m) {
|
modelOpt.ifPresent((m) {
|
||||||
model = model.copyWith(roles: [m]);
|
model = model.copyWith(roles: [m]);
|
||||||
});
|
});
|
||||||
|
|
||||||
// roles: [RoleQuery.parseRow(row.skip(3).take(1).toList())]
|
|
||||||
// .where((x) => x != null)
|
|
||||||
// .toList());
|
|
||||||
}
|
}
|
||||||
return Optional.of(model);
|
return Optional.of(model);
|
||||||
}
|
}
|
||||||
|
@ -405,8 +404,8 @@ class UserQuery extends Query<User, UserQueryWhere> {
|
||||||
|
|
||||||
@override
|
@override
|
||||||
bool canCompile(trampoline) {
|
bool canCompile(trampoline) {
|
||||||
return (!(trampoline.contains('users') == true &&
|
return (!(trampoline.contains('users') &&
|
||||||
trampoline.contains('role_users') == true));
|
trampoline.contains('role_users')));
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -421,7 +420,8 @@ class UserQuery extends Query<User, UserQueryWhere> {
|
||||||
var l = out[idx];
|
var l = out[idx];
|
||||||
return out
|
return out
|
||||||
..[idx] = l.copyWith(
|
..[idx] = l.copyWith(
|
||||||
roles: List<_Role>.from(l.roles)..addAll(model.roles));
|
roles: List<_Role>.from(l.roles ?? [])
|
||||||
|
..addAll(model.roles ?? []));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -439,7 +439,8 @@ class UserQuery extends Query<User, UserQueryWhere> {
|
||||||
var l = out[idx];
|
var l = out[idx];
|
||||||
return out
|
return out
|
||||||
..[idx] = l.copyWith(
|
..[idx] = l.copyWith(
|
||||||
roles: List<_Role>.from(l.roles)..addAll(model.roles));
|
roles: List<_Role>.from(l.roles ?? [])
|
||||||
|
..addAll(model.roles ?? []));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -457,7 +458,8 @@ class UserQuery extends Query<User, UserQueryWhere> {
|
||||||
var l = out[idx];
|
var l = out[idx];
|
||||||
return out
|
return out
|
||||||
..[idx] = l.copyWith(
|
..[idx] = l.copyWith(
|
||||||
roles: List<_Role>.from(l.roles)..addAll(model.roles));
|
roles: List<_Role>.from(l.roles ?? [])
|
||||||
|
..addAll(model.roles ?? []));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -516,23 +518,23 @@ class UserQueryValues extends MapQueryValues {
|
||||||
|
|
||||||
@generatedSerializable
|
@generatedSerializable
|
||||||
class Role implements _Role {
|
class Role implements _Role {
|
||||||
const Role({this.role, this.users = const []});
|
Role({this.role, this.users = const []});
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final String? role;
|
String? role;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final List<_User> users;
|
List<_User> users;
|
||||||
|
|
||||||
Role copyWith({String? role, List<_User> users = const []}) {
|
Role copyWith({String? role, List<_User>? users}) {
|
||||||
return Role(role: role ?? this.role, users: users);
|
return Role(role: role ?? this.role, users: users ?? this.users);
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
bool operator ==(other) {
|
bool operator ==(other) {
|
||||||
return other is _Role &&
|
return other is _Role &&
|
||||||
other.role == role &&
|
other.role == role &&
|
||||||
ListEquality<_User?>(DefaultEquality<_User>())
|
ListEquality<_User>(DefaultEquality<_User>())
|
||||||
.equals(other.users, users);
|
.equals(other.users, users);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
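`Role.copyWith` previously defaulted `users` to `const []`, silently dropping the existing list whenever the caller omitted it; it now takes a nullable parameter and falls back to the current value. A trimmed copy of the fix, with `_User` simplified to `String` for illustration:

```dart
class Role {
  Role({this.role, this.users = const []});
  String? role;
  List<String> users;

  // A missing argument now keeps the current list instead of clearing it.
  Role copyWith({String? role, List<String>? users}) {
    return Role(role: role ?? this.role, users: users ?? this.users);
  }
}

void main() {
  final admin = Role(role: 'admin', users: ['alice']);
  print(admin.copyWith(role: 'root').users); // [alice], not []
}
```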
@ -553,13 +555,13 @@ class Role implements _Role {
|
||||||
|
|
||||||
@generatedSerializable
|
@generatedSerializable
|
||||||
class RoleUser implements _RoleUser {
|
class RoleUser implements _RoleUser {
|
||||||
const RoleUser({this.role, this.user});
|
RoleUser({this.role, this.user});
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final _Role? role;
|
_Role? role;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final _User? user;
|
_User? user;
|
||||||
|
|
||||||
RoleUser copyWith({_Role? role, _User? user}) {
|
RoleUser copyWith({_Role? role, _User? user}) {
|
||||||
return RoleUser(role: role ?? this.role, user: user ?? this.user);
|
return RoleUser(role: role ?? this.role, user: user ?? this.user);
|
||||||
|
@ -587,19 +589,19 @@ class RoleUser implements _RoleUser {
|
||||||
|
|
||||||
@generatedSerializable
|
@generatedSerializable
|
||||||
class User implements _User {
|
class User implements _User {
|
||||||
const User({this.email, this.name, this.password, this.roles = const []});
|
User({this.email, this.name, this.password, this.roles = const []});
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final String? email;
|
String? email;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final String? name;
|
String? name;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final String? password;
|
String? password;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final List<_Role> roles;
|
List<_Role> roles;
|
||||||
|
|
||||||
User copyWith(
|
User copyWith(
|
||||||
{String? email, String? name, String? password, List<_Role>? roles}) {
|
{String? email, String? name, String? password, List<_Role>? roles}) {
|
||||||
|
@ -607,7 +609,7 @@ class User implements _User {
|
||||||
email: email ?? this.email,
|
email: email ?? this.email,
|
||||||
name: name ?? this.name,
|
name: name ?? this.name,
|
||||||
password: password ?? this.password,
|
password: password ?? this.password,
|
||||||
roles: roles ?? []);
|
roles: roles ?? this.roles);
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -641,11 +643,11 @@ class User implements _User {
|
||||||
|
|
||||||
const RoleSerializer roleSerializer = RoleSerializer();
|
const RoleSerializer roleSerializer = RoleSerializer();
|
||||||
|
|
||||||
class RoleEncoder extends Converter<Role, Map?> {
|
class RoleEncoder extends Converter<Role, Map> {
|
||||||
const RoleEncoder();
|
const RoleEncoder();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map? convert(Role model) => RoleSerializer.toMap(model);
|
Map convert(Role model) => RoleSerializer.toMap(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
class RoleDecoder extends Converter<Map, Role> {
|
class RoleDecoder extends Converter<Map, Role> {
|
||||||
|
@ -655,7 +657,7 @@ class RoleDecoder extends Converter<Map, Role> {
|
||||||
Role convert(Map map) => RoleSerializer.fromMap(map);
|
Role convert(Map map) => RoleSerializer.fromMap(map);
|
||||||
}
|
}
|
||||||
|
|
||||||
class RoleSerializer extends Codec<Role, Map?> {
|
class RoleSerializer extends Codec<Role, Map> {
|
||||||
const RoleSerializer();
|
const RoleSerializer();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -671,7 +673,10 @@ class RoleSerializer extends Codec<Role, Map?> {
|
||||||
: []);
|
: []);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic> toMap(_Role model) {
|
static Map<String, dynamic> toMap(_Role? model) {
|
||||||
|
if (model == null) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
return {
|
return {
|
||||||
'role': model.role,
|
'role': model.role,
|
||||||
'users': model.users.map((m) => UserSerializer.toMap(m)).toList()
|
'users': model.users.map((m) => UserSerializer.toMap(m)).toList()
|
||||||
|
@ -720,10 +725,13 @@ class RoleUserSerializer extends Codec<RoleUser, Map> {
|
||||||
: null);
|
: null);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic> toMap(_RoleUser model) {
|
static Map<String, dynamic> toMap(_RoleUser? model) {
|
||||||
|
if (model == null) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
return {
|
return {
|
||||||
'role': (model.role != null) ? RoleSerializer.toMap(model.role!) : '',
|
'role': RoleSerializer.toMap(model.role),
|
||||||
'user': (model.user != null) ? UserSerializer.toMap(model.user!) : ''
|
'user': UserSerializer.toMap(model.user)
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
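Because every `toMap` now tolerates a null model, the join-table serializer can hand its nullable relations straight to the nested serializers instead of branching on null and substituting an empty string. A reduced sketch of that delegation (model names are illustrative):

```dart
class Left {
  Left({this.name});
  final String? name;
}

class Right {
  Right({this.name});
  final String? name;
}

class LeftSerializer {
  static Map<String, dynamic> toMap(Left? model) =>
      model == null ? {} : {'name': model.name};
}

class RightSerializer {
  static Map<String, dynamic> toMap(Right? model) =>
      model == null ? {} : {'name': model.name};
}

class Pair {
  Pair({this.left, this.right});
  final Left? left;
  final Right? right;
}

class PairSerializer {
  // Nullable relations are passed straight to the nested serializers;
  // a null relation simply becomes an empty map.
  static Map<String, dynamic> toMap(Pair? model) {
    if (model == null) {
      return {};
    }
    return {
      'left': LeftSerializer.toMap(model.left),
      'right': RightSerializer.toMap(model.right),
    };
  }
}
```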
@ -738,11 +746,11 @@ abstract class RoleUserFields {
|
||||||
|
|
||||||
const UserSerializer userSerializer = UserSerializer();
|
const UserSerializer userSerializer = UserSerializer();
|
||||||
|
|
||||||
class UserEncoder extends Converter<User, Map?> {
|
class UserEncoder extends Converter<User, Map> {
|
||||||
const UserEncoder();
|
const UserEncoder();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map? convert(User model) => UserSerializer.toMap(model);
|
Map convert(User model) => UserSerializer.toMap(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
class UserDecoder extends Converter<Map, User> {
|
class UserDecoder extends Converter<Map, User> {
|
||||||
|
@ -752,7 +760,7 @@ class UserDecoder extends Converter<Map, User> {
|
||||||
User convert(Map map) => UserSerializer.fromMap(map);
|
User convert(Map map) => UserSerializer.fromMap(map);
|
||||||
}
|
}
|
||||||
|
|
||||||
class UserSerializer extends Codec<User, Map?> {
|
class UserSerializer extends Codec<User, Map> {
|
||||||
const UserSerializer();
|
const UserSerializer();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -770,7 +778,10 @@ class UserSerializer extends Codec<User, Map?> {
|
||||||
: []);
|
: []);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic> toMap(_User model) {
|
static Map<String, dynamic> toMap(_User? model) {
|
||||||
|
if (model == null) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
return {
|
return {
|
||||||
'email': model.email,
|
'email': model.email,
|
||||||
'name': model.name,
|
'name': model.name,
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
import 'package:angel3_migration/angel3_migration.dart';
|
import 'package:angel3_migration/angel3_migration.dart';
|
||||||
//import 'package:angel3_model/angel3_model.dart';
|
|
||||||
import 'package:angel3_serialize/angel3_serialize.dart';
|
import 'package:angel3_serialize/angel3_serialize.dart';
|
||||||
import 'package:angel3_orm/angel3_orm.dart';
|
import 'package:angel3_orm/angel3_orm.dart';
|
||||||
import 'package:optional/optional.dart';
|
import 'package:optional/optional.dart';
|
||||||
|
@ -9,6 +8,7 @@ part 'fortune.g.dart';
|
||||||
@serializable
|
@serializable
|
||||||
@Orm(tableName: 'fortune')
|
@Orm(tableName: 'fortune')
|
||||||
abstract class _Fortune {
|
abstract class _Fortune {
|
||||||
|
@primaryKey
|
||||||
int? id;
|
int? id;
|
||||||
|
|
||||||
@Column(length: 2048)
|
@Column(length: 2048)
|
||||||
|
|
|
@ -10,7 +10,7 @@ class FortuneMigration extends Migration {
|
||||||
@override
|
@override
|
||||||
void up(Schema schema) {
|
void up(Schema schema) {
|
||||||
schema.create('fortune', (table) {
|
schema.create('fortune', (table) {
|
||||||
table.integer('id');
|
table.integer('id').primaryKey();
|
||||||
table.varChar('message', length: 2048);
|
table.varChar('message', length: 2048);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
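For context, this migration is generated from the annotated model shown in the previous hunk: `@primaryKey` on `id` produces the `.primaryKey()` call, and `@Column(length: 2048)` carries through to `varChar('message', length: 2048)`. A sketch of such a source model, reconstructed from the hunks above; the message field's exact declaration is inferred from the generated migration, and `fortune.g.dart` only exists after running the generator:

```dart
import 'package:angel3_migration/angel3_migration.dart';
import 'package:angel3_orm/angel3_orm.dart';
import 'package:angel3_serialize/angel3_serialize.dart';

part 'fortune.g.dart';

@serializable
@Orm(tableName: 'fortune')
abstract class _Fortune {
  // Generates .primaryKey() on the id column in the migration.
  @primaryKey
  int? id;

  // Carries through to varChar('message', length: 2048).
  @Column(length: 2048)
  String? message;
}
```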
@ -63,17 +63,17 @@ class FortuneQuery extends Query<Fortune, FortuneQueryWhere> {
|
||||||
return FortuneQueryWhere(this);
|
return FortuneQueryWhere(this);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Fortune? parseRow(List row) {
|
static Optional<Fortune> parseRow(List row) {
|
||||||
if (row.every((x) => x == null)) {
|
if (row.every((x) => x == null)) {
|
||||||
return null;
|
return Optional.empty();
|
||||||
}
|
}
|
||||||
var model = Fortune(id: (row[0] as int?), message: (row[1] as String?));
|
var model = Fortune(id: (row[0] as int?), message: (row[1] as String?));
|
||||||
return model;
|
return Optional.of(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Optional<Fortune> deserialize(List row) {
|
Optional<Fortune> deserialize(List row) {
|
||||||
return Optional.ofNullable(parseRow(row));
|
return parseRow(row);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
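`parseRow` now returns `Optional<Fortune>` instead of a nullable model, so `deserialize` can return it unchanged: an all-null row becomes `Optional.empty()`, anything else is wrapped with `Optional.of`. A trimmed copy of that shape using `package:optional` as imported above:

```dart
import 'package:optional/optional.dart';

class Fortune {
  Fortune({this.id, this.message});
  final int? id;
  final String? message;
}

// An all-null row means the joined side produced no match, so the
// result is Optional.empty(); otherwise the parsed model is wrapped.
Optional<Fortune> parseRow(List row) {
  if (row.every((x) => x == null)) {
    return Optional.empty();
  }
  return Optional.of(
      Fortune(id: row[0] as int?, message: row[1] as String?));
}

void main() {
  parseRow([1, 'ask again later'])
      .ifPresent((f) => print(f.message)); // ask again later
  parseRow([null, null])
      .ifPresent((f) => print(f.message)); // prints nothing
}
```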
|
@ -1,5 +1,4 @@
|
||||||
import 'package:angel3_migration/angel3_migration.dart';
|
import 'package:angel3_migration/angel3_migration.dart';
|
||||||
import 'package:angel3_model/angel3_model.dart';
|
|
||||||
import 'package:angel3_orm/angel3_orm.dart';
|
import 'package:angel3_orm/angel3_orm.dart';
|
||||||
import 'package:angel3_serialize/angel3_serialize.dart';
|
import 'package:angel3_serialize/angel3_serialize.dart';
|
||||||
import 'package:optional/optional.dart';
|
import 'package:optional/optional.dart';
|
||||||
|
|
|
@ -13,7 +13,7 @@ class HasCarMigration extends Migration {
|
||||||
table.serial('id').primaryKey();
|
table.serial('id').primaryKey();
|
||||||
table.timeStamp('created_at');
|
table.timeStamp('created_at');
|
||||||
table.timeStamp('updated_at');
|
table.timeStamp('updated_at');
|
||||||
table.integer('type');
|
table.integer('type').defaultsTo(0);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -73,7 +73,7 @@ class HasCarQuery extends Query<HasCar, HasCarQueryWhere> {
|
||||||
id: row[0].toString(),
|
id: row[0].toString(),
|
||||||
createdAt: (row[1] as DateTime?),
|
createdAt: (row[1] as DateTime?),
|
||||||
updatedAt: (row[2] as DateTime?),
|
updatedAt: (row[2] as DateTime?),
|
||||||
type: row[3] == null ? null : CarType.values[(row[3] as int)]);
|
type: row[3] == null ? null : CarType?.values[(row[3] as int)]);
|
||||||
return Optional.of(model);
|
return Optional.of(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -88,8 +88,8 @@ class HasCarQueryWhere extends QueryWhere {
|
||||||
: id = NumericSqlExpressionBuilder<int>(query, 'id'),
|
: id = NumericSqlExpressionBuilder<int>(query, 'id'),
|
||||||
createdAt = DateTimeSqlExpressionBuilder(query, 'created_at'),
|
createdAt = DateTimeSqlExpressionBuilder(query, 'created_at'),
|
||||||
updatedAt = DateTimeSqlExpressionBuilder(query, 'updated_at'),
|
updatedAt = DateTimeSqlExpressionBuilder(query, 'updated_at'),
|
||||||
type =
|
type = EnumSqlExpressionBuilder<CarType?>(
|
||||||
EnumSqlExpressionBuilder<CarType?>(query, 'type', (v) => v!.index);
|
query, 'type', (v) => v?.index as int);
|
||||||
|
|
||||||
final NumericSqlExpressionBuilder<int> id;
|
final NumericSqlExpressionBuilder<int> id;
|
||||||
|
|
||||||
|
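The where-builder for the enum column is handed a converter that turns a (possibly null) `CarType` into its integer index, because the column stores the index rather than the name. The round trip in plain Dart, outside the query builder (enum values other than `sedan` are illustrative):

```dart
enum CarType { sedan, suv, atv }

// An enum column is stored as the value's position in `values`...
int toColumn(CarType type) => type.index;

// ...and read back by indexing into `values`.
CarType fromColumn(int index) => CarType.values[index];

void main() {
  final stored = toColumn(CarType.suv); // 1
  print(fromColumn(stored)); // CarType.suv
}
```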
@ -126,8 +126,8 @@ class HasCarQueryValues extends MapQueryValues {
|
||||||
}
|
}
|
||||||
|
|
||||||
set updatedAt(DateTime? value) => values['updated_at'] = value;
|
set updatedAt(DateTime? value) => values['updated_at'] = value;
|
||||||
CarType get type {
|
CarType? get type {
|
||||||
return CarType.values[(values['type'] as int)];
|
return CarType?.values[(values['type'] as int)];
|
||||||
}
|
}
|
||||||
|
|
||||||
set type(CarType? value) => values['type'] = value?.index;
|
set type(CarType? value) => values['type'] = value?.index;
|
||||||
|
@ -159,7 +159,7 @@ class HasCar extends _HasCar {
|
||||||
DateTime? updatedAt;
|
DateTime? updatedAt;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final CarType? type;
|
CarType? type;
|
||||||
|
|
||||||
HasCar copyWith(
|
HasCar copyWith(
|
||||||
{String? id, DateTime? createdAt, DateTime? updatedAt, CarType? type}) {
|
{String? id, DateTime? createdAt, DateTime? updatedAt, CarType? type}) {
|
||||||
|
@ -230,31 +230,30 @@ class HasCarSerializer extends Codec<HasCar, Map> {
|
||||||
id: map['id'] as String?,
|
id: map['id'] as String?,
|
||||||
createdAt: map['created_at'] != null
|
createdAt: map['created_at'] != null
|
||||||
? (map['created_at'] is DateTime
|
? (map['created_at'] is DateTime
|
||||||
? (map['created_at'] as DateTime?)
|
? (map['created_at'] as DateTime)
|
||||||
: DateTime.parse(map['created_at'].toString()))
|
: DateTime.parse(map['created_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
updatedAt: map['updated_at'] != null
|
updatedAt: map['updated_at'] != null
|
||||||
? (map['updated_at'] is DateTime
|
? (map['updated_at'] is DateTime
|
||||||
? (map['updated_at'] as DateTime?)
|
? (map['updated_at'] as DateTime)
|
||||||
: DateTime.parse(map['updated_at'].toString()))
|
: DateTime.parse(map['updated_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
type: map['type'] is CarType
|
type: map['type'] is CarType?
|
||||||
? (map['type'] as CarType?)
|
? (map['type'] as CarType?)
|
||||||
: (map['type'] is int
|
: (map['type'] is int
|
||||||
? CarType.values[map['type'] as int]
|
? CarType?.values[map['type'] as int]
|
||||||
: CarType.sedan));
|
: CarType.sedan));
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic> toMap(_HasCar model) {
|
static Map<String, dynamic> toMap(_HasCar? model) {
|
||||||
if (model.type == null) {
|
if (model == null) {
|
||||||
throw FormatException("Missing required field 'type' on HasCar.");
|
return {};
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
'created_at': model.createdAt?.toIso8601String(),
|
'created_at': model.createdAt?.toIso8601String(),
|
||||||
'updated_at': model.updatedAt?.toIso8601String(),
|
'updated_at': model.updatedAt?.toIso8601String(),
|
||||||
'type': model.type == null ? null : CarType.values.indexOf(model.type!)
|
'type': model.type != null ? CarType.values.indexOf(model.type!) : null
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,8 +1,6 @@
|
||||||
import 'dart:convert';
|
|
||||||
import 'package:angel3_migration/angel3_migration.dart';
|
import 'package:angel3_migration/angel3_migration.dart';
|
||||||
import 'package:angel3_orm/angel3_orm.dart';
|
import 'package:angel3_orm/angel3_orm.dart';
|
||||||
import 'package:angel3_serialize/angel3_serialize.dart';
|
import 'package:angel3_serialize/angel3_serialize.dart';
|
||||||
import 'package:collection/collection.dart';
|
|
||||||
import 'package:optional/optional.dart';
|
import 'package:optional/optional.dart';
|
||||||
|
|
||||||
part 'has_map.g.dart';
|
part 'has_map.g.dart';
|
||||||
|
|
|
@ -10,8 +10,10 @@ class HasMapMigration extends Migration {
|
||||||
@override
|
@override
|
||||||
void up(Schema schema) {
|
void up(Schema schema) {
|
||||||
schema.create('has_maps', (table) {
|
schema.create('has_maps', (table) {
|
||||||
table.declare('value', ColumnType('jsonb'));
|
table.declareColumn(
|
||||||
table.declare('list', ColumnType('jsonb'));
|
'value', Column(type: ColumnType('jsonb'), length: 255));
|
||||||
|
table.declareColumn(
|
||||||
|
'list', Column(type: ColumnType('jsonb'), length: 255));
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
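Columns without a dedicated helper, here `jsonb`, are now declared through `declareColumn` with an explicit `Column` descriptor instead of `declare`. A sketch under the same API assumptions (imports mirror the generated file):

```dart
import 'package:angel3_migration/angel3_migration.dart';
import 'package:angel3_orm/angel3_orm.dart';

class DocumentMigration extends Migration {
  @override
  void up(Schema schema) {
    schema.create('documents', (table) {
      table.serial('id').primaryKey();
      // No dedicated helper exists for jsonb, so the column is
      // described explicitly through declareColumn.
      table.declareColumn(
          'payload', Column(type: ColumnType('jsonb'), length: 255));
    });
  }

  @override
  void down(Schema schema) {
    schema.drop('documents');
  }
}
```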
@ -67,10 +69,9 @@ class HasMapQuery extends Query<HasMap, HasMapQueryWhere> {
|
||||||
if (row.every((x) => x == null)) {
|
if (row.every((x) => x == null)) {
|
||||||
return Optional.empty();
|
return Optional.empty();
|
||||||
}
|
}
|
||||||
|
var model = HasMap(
|
||||||
var m = row[0] as Map;
|
value: (row[0] as Map<dynamic, dynamic>?),
|
||||||
var l = row[1] as List;
|
list: (row[1] as List<dynamic>?));
|
||||||
var model = HasMap(value: m, list: l);
|
|
||||||
return Optional.of(model);
|
return Optional.of(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -101,16 +102,16 @@ class HasMapQueryValues extends MapQueryValues {
|
||||||
return {'list': 'jsonb'};
|
return {'list': 'jsonb'};
|
||||||
}
|
}
|
||||||
|
|
||||||
Map<dynamic, dynamic> get value {
|
Map<dynamic, dynamic>? get value {
|
||||||
return (values['value'] as Map<dynamic, dynamic>);
|
return (values['value'] as Map<dynamic, dynamic>?);
|
||||||
}
|
}
|
||||||
|
|
||||||
set value(Map<dynamic, dynamic> value) => values['value'] = value;
|
set value(Map<dynamic, dynamic>? value) => values['value'] = value;
|
||||||
List<dynamic> get list {
|
List<dynamic>? get list {
|
||||||
return (json.decode((values['list'] as String)) as List);
|
return (json.decode((values['list'] as String)) as List);
|
||||||
}
|
}
|
||||||
|
|
||||||
set list(List<dynamic> value) => values['list'] = json.encode(value);
|
set list(List<dynamic>? value) => values['list'] = json.encode(value);
|
||||||
void copyFrom(HasMap model) {
|
void copyFrom(HasMap model) {
|
||||||
value = model.value;
|
value = model.value;
|
||||||
list = model.list;
|
list = model.list;
|
||||||
|
@ -123,17 +124,16 @@ class HasMapQueryValues extends MapQueryValues {
|
||||||
|
|
||||||
@generatedSerializable
|
@generatedSerializable
|
||||||
class HasMap implements _HasMap {
|
class HasMap implements _HasMap {
|
||||||
const HasMap({this.value = const {}, this.list = const []});
|
HasMap({this.value, this.list = const []});
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final Map<dynamic, dynamic> value;
|
Map<dynamic, dynamic>? value;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final List<dynamic> list;
|
List<dynamic>? list;
|
||||||
|
|
||||||
HasMap copyWith(
|
HasMap copyWith({Map<dynamic, dynamic>? value, List<dynamic>? list}) {
|
||||||
{Map<dynamic, dynamic> value = const {}, List<dynamic> list = const []}) {
|
return HasMap(value: value ?? this.value, list: list ?? this.list);
|
||||||
return HasMap(value: value, list: list);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -197,7 +197,10 @@ class HasMapSerializer extends Codec<HasMap, Map> {
|
||||||
: []);
|
: []);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic> toMap(_HasMap model) {
|
static Map<String, dynamic> toMap(_HasMap? model) {
|
||||||
|
if (model == null) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
return {'value': model.value, 'list': model.list};
|
return {'value': model.value, 'list': model.list};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,7 +1,6 @@
|
||||||
library angel3_orm_generator.test.models.leg;
|
library angel3_orm_generator.test.models.leg;
|
||||||
|
|
||||||
import 'package:angel3_migration/angel3_migration.dart';
|
import 'package:angel3_migration/angel3_migration.dart';
|
||||||
import 'package:angel3_model/angel3_model.dart';
|
|
||||||
import 'package:angel3_orm/angel3_orm.dart';
|
import 'package:angel3_orm/angel3_orm.dart';
|
||||||
import 'package:angel3_serialize/angel3_serialize.dart';
|
import 'package:angel3_serialize/angel3_serialize.dart';
|
||||||
import 'package:optional/optional.dart';
|
import 'package:optional/optional.dart';
|
||||||
|
|
|
@ -13,7 +13,7 @@ class LegMigration extends Migration {
|
||||||
table.serial('id').primaryKey();
|
table.serial('id').primaryKey();
|
||||||
table.timeStamp('created_at');
|
table.timeStamp('created_at');
|
||||||
table.timeStamp('updated_at');
|
table.timeStamp('updated_at');
|
||||||
table.varChar('name');
|
table.varChar('name', length: 255);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -31,7 +31,8 @@ class FootMigration extends Migration {
|
||||||
table.timeStamp('created_at');
|
table.timeStamp('created_at');
|
||||||
table.timeStamp('updated_at');
|
table.timeStamp('updated_at');
|
||||||
table.integer('leg_id');
|
table.integer('leg_id');
|
||||||
table.declare('n_toes', ColumnType('decimal'));
|
table.declareColumn(
|
||||||
|
'n_toes', Column(type: ColumnType('decimal'), length: 255));
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -67,7 +68,7 @@ class LegQuery extends Query<Leg, LegQueryWhere> {
|
||||||
|
|
||||||
LegQueryWhere? _where;
|
LegQueryWhere? _where;
|
||||||
|
|
||||||
FootQuery? _foot;
|
late FootQuery _foot;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map<String, String> get casts {
|
Map<String, String> get casts {
|
||||||
|
@ -117,7 +118,7 @@ class LegQuery extends Query<Leg, LegQueryWhere> {
|
||||||
return parseRow(row);
|
return parseRow(row);
|
||||||
}
|
}
|
||||||
|
|
||||||
FootQuery? get foot {
|
FootQuery get foot {
|
||||||
return _foot;
|
return _foot;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -416,7 +417,7 @@ class Foot extends _Foot {
|
||||||
return 'Foot(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, legId=$legId, nToes=$nToes)';
|
return 'Foot(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, legId=$legId, nToes=$nToes)';
|
||||||
}
|
}
|
||||||
|
|
||||||
Map<String, dynamic>? toJson() {
|
Map<String, dynamic> toJson() {
|
||||||
return FootSerializer.toMap(this);
|
return FootSerializer.toMap(this);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -453,12 +454,12 @@ class LegSerializer extends Codec<Leg, Map> {
|
||||||
id: map['id'] as String?,
|
id: map['id'] as String?,
|
||||||
createdAt: map['created_at'] != null
|
createdAt: map['created_at'] != null
|
||||||
? (map['created_at'] is DateTime
|
? (map['created_at'] is DateTime
|
||||||
? (map['created_at'] as DateTime?)
|
? (map['created_at'] as DateTime)
|
||||||
: DateTime.parse(map['created_at'].toString()))
|
: DateTime.parse(map['created_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
updatedAt: map['updated_at'] != null
|
updatedAt: map['updated_at'] != null
|
||||||
? (map['updated_at'] is DateTime
|
? (map['updated_at'] is DateTime
|
||||||
? (map['updated_at'] as DateTime?)
|
? (map['updated_at'] as DateTime)
|
||||||
: DateTime.parse(map['updated_at'].toString()))
|
: DateTime.parse(map['updated_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
foot: map['foot'] != null
|
foot: map['foot'] != null
|
||||||
|
@ -467,7 +468,10 @@ class LegSerializer extends Codec<Leg, Map> {
|
||||||
name: map['name'] as String?);
|
name: map['name'] as String?);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic> toMap(_Leg model) {
|
static Map<String, dynamic> toMap(_Leg? model) {
|
||||||
|
if (model == null) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
'created_at': model.createdAt?.toIso8601String(),
|
'created_at': model.createdAt?.toIso8601String(),
|
||||||
|
@ -500,11 +504,11 @@ abstract class LegFields {
|
||||||
|
|
||||||
const FootSerializer footSerializer = FootSerializer();
|
const FootSerializer footSerializer = FootSerializer();
|
||||||
|
|
||||||
class FootEncoder extends Converter<Foot, Map?> {
|
class FootEncoder extends Converter<Foot, Map> {
|
||||||
const FootEncoder();
|
const FootEncoder();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map? convert(Foot model) => FootSerializer.toMap(model);
|
Map convert(Foot model) => FootSerializer.toMap(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
class FootDecoder extends Converter<Map, Foot> {
|
class FootDecoder extends Converter<Map, Foot> {
|
||||||
|
@ -514,7 +518,7 @@ class FootDecoder extends Converter<Map, Foot> {
|
||||||
Foot convert(Map map) => FootSerializer.fromMap(map);
|
Foot convert(Map map) => FootSerializer.fromMap(map);
|
||||||
}
|
}
|
||||||
|
|
||||||
class FootSerializer extends Codec<Foot, Map?> {
|
class FootSerializer extends Codec<Foot, Map> {
|
||||||
const FootSerializer();
|
const FootSerializer();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -526,21 +530,21 @@ class FootSerializer extends Codec<Foot, Map?> {
|
||||||
id: map['id'] as String?,
|
id: map['id'] as String?,
|
||||||
createdAt: map['created_at'] != null
|
createdAt: map['created_at'] != null
|
||||||
? (map['created_at'] is DateTime
|
? (map['created_at'] is DateTime
|
||||||
? (map['created_at'] as DateTime?)
|
? (map['created_at'] as DateTime)
|
||||||
: DateTime.parse(map['created_at'].toString()))
|
: DateTime.parse(map['created_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
updatedAt: map['updated_at'] != null
|
updatedAt: map['updated_at'] != null
|
||||||
? (map['updated_at'] is DateTime
|
? (map['updated_at'] is DateTime
|
||||||
? (map['updated_at'] as DateTime?)
|
? (map['updated_at'] as DateTime)
|
||||||
: DateTime.parse(map['updated_at'].toString()))
|
: DateTime.parse(map['updated_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
legId: map['leg_id'] as int?,
|
legId: map['leg_id'] as int?,
|
||||||
nToes: map['n_toes'] as double?);
|
nToes: map['n_toes'] as double?);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic>? toMap(_Foot? model) {
|
static Map<String, dynamic> toMap(_Foot? model) {
|
||||||
if (model == null) {
|
if (model == null) {
|
||||||
return null;
|
return {};
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
|
|
|
@ -1,7 +1,6 @@
|
||||||
library angel3_orm_generator.test.models.order;
|
library angel3_orm_generator.test.models.order;
|
||||||
|
|
||||||
import 'package:angel3_migration/angel3_migration.dart';
|
import 'package:angel3_migration/angel3_migration.dart';
|
||||||
import 'package:angel3_model/angel3_model.dart';
|
|
||||||
import 'package:angel3_orm/angel3_orm.dart';
|
import 'package:angel3_orm/angel3_orm.dart';
|
||||||
import 'package:angel3_serialize/angel3_serialize.dart';
|
import 'package:angel3_serialize/angel3_serialize.dart';
|
||||||
import 'package:optional/optional.dart';
|
import 'package:optional/optional.dart';
|
||||||
|
|
|
@ -17,7 +17,7 @@ class OrderMigration extends Migration {
|
||||||
table.timeStamp('order_date');
|
table.timeStamp('order_date');
|
||||||
table.integer('shipper_id');
|
table.integer('shipper_id');
|
||||||
table
|
table
|
||||||
.declare('customer_id', ColumnType('serial'))
|
.declare('customer_id', ColumnType('int'))
|
||||||
.references('customers', 'id');
|
.references('customers', 'id');
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -64,7 +64,7 @@ class OrderQuery extends Query<Order, OrderQueryWhere> {
|
||||||
|
|
||||||
OrderQueryWhere? _where;
|
OrderQueryWhere? _where;
|
||||||
|
|
||||||
CustomerQuery? _customer;
|
late CustomerQuery _customer;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map<String, String> get casts {
|
Map<String, String> get casts {
|
||||||
|
@ -124,7 +124,7 @@ class OrderQuery extends Query<Order, OrderQueryWhere> {
|
||||||
return parseRow(row);
|
return parseRow(row);
|
||||||
}
|
}
|
||||||
|
|
||||||
CustomerQuery? get customer {
|
CustomerQuery get customer {
|
||||||
return _customer;
|
return _customer;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -188,11 +188,11 @@ class OrderQueryValues extends MapQueryValues {
|
||||||
}
|
}
|
||||||
|
|
||||||
set updatedAt(DateTime? value) => values['updated_at'] = value;
|
set updatedAt(DateTime? value) => values['updated_at'] = value;
|
||||||
int? get customerId {
|
int get customerId {
|
||||||
return (values['customer_id'] as int?);
|
return (values['customer_id'] as int);
|
||||||
}
|
}
|
||||||
|
|
||||||
set customerId(int? value) => values['customer_id'] = value;
|
set customerId(int value) => values['customer_id'] = value;
|
||||||
int? get employeeId {
|
int? get employeeId {
|
||||||
return (values['employee_id'] as int?);
|
return (values['employee_id'] as int?);
|
||||||
}
|
}
|
||||||
|
@ -215,7 +215,7 @@ class OrderQueryValues extends MapQueryValues {
|
||||||
orderDate = model.orderDate;
|
orderDate = model.orderDate;
|
||||||
shipperId = model.shipperId;
|
shipperId = model.shipperId;
|
||||||
if (model.customer != null) {
|
if (model.customer != null) {
|
||||||
values['customer_id'] = model.customer!.id;
|
values['customer_id'] = model.customer?.id;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -348,16 +348,16 @@ class Order extends _Order {
|
||||||
DateTime? updatedAt;
|
DateTime? updatedAt;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final _Customer? customer;
|
_Customer? customer;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final int? employeeId;
|
int? employeeId;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final DateTime? orderDate;
|
DateTime? orderDate;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final int? shipperId;
|
int? shipperId;
|
||||||
|
|
||||||
Order copyWith(
|
Order copyWith(
|
||||||
{String? id,
|
{String? id,
|
||||||
|
@ -446,7 +446,7 @@ class Customer extends _Customer {
|
||||||
return 'Customer(id=$id, createdAt=$createdAt, updatedAt=$updatedAt)';
|
return 'Customer(id=$id, createdAt=$createdAt, updatedAt=$updatedAt)';
|
||||||
}
|
}
|
||||||
|
|
||||||
Map<String, dynamic>? toJson() {
|
Map<String, dynamic> toJson() {
|
||||||
return CustomerSerializer.toMap(this);
|
return CustomerSerializer.toMap(this);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -483,12 +483,12 @@ class OrderSerializer extends Codec<Order, Map> {
|
||||||
id: map['id'] as String?,
|
id: map['id'] as String?,
|
||||||
createdAt: map['created_at'] != null
|
createdAt: map['created_at'] != null
|
||||||
? (map['created_at'] is DateTime
|
? (map['created_at'] is DateTime
|
||||||
? (map['created_at'] as DateTime?)
|
? (map['created_at'] as DateTime)
|
||||||
: DateTime.parse(map['created_at'].toString()))
|
: DateTime.parse(map['created_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
updatedAt: map['updated_at'] != null
|
updatedAt: map['updated_at'] != null
|
||||||
? (map['updated_at'] is DateTime
|
? (map['updated_at'] is DateTime
|
||||||
? (map['updated_at'] as DateTime?)
|
? (map['updated_at'] as DateTime)
|
||||||
: DateTime.parse(map['updated_at'].toString()))
|
: DateTime.parse(map['updated_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
customer: map['customer'] != null
|
customer: map['customer'] != null
|
||||||
|
@ -497,13 +497,16 @@ class OrderSerializer extends Codec<Order, Map> {
|
||||||
employeeId: map['employee_id'] as int?,
|
employeeId: map['employee_id'] as int?,
|
||||||
orderDate: map['order_date'] != null
|
orderDate: map['order_date'] != null
|
||||||
? (map['order_date'] is DateTime
|
? (map['order_date'] is DateTime
|
||||||
? (map['order_date'] as DateTime?)
|
? (map['order_date'] as DateTime)
|
||||||
: DateTime.parse(map['order_date'].toString()))
|
: DateTime.parse(map['order_date'].toString()))
|
||||||
: null,
|
: null,
|
||||||
shipperId: map['shipper_id'] as int?);
|
shipperId: map['shipper_id'] as int?);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic> toMap(_Order model) {
|
static Map<String, dynamic> toMap(_Order? model) {
|
||||||
|
if (model == null) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
'created_at': model.createdAt?.toIso8601String(),
|
'created_at': model.createdAt?.toIso8601String(),
|
||||||
|
@ -544,11 +547,11 @@ abstract class OrderFields {
|
||||||
|
|
||||||
const CustomerSerializer customerSerializer = CustomerSerializer();
|
const CustomerSerializer customerSerializer = CustomerSerializer();
|
||||||
|
|
||||||
class CustomerEncoder extends Converter<Customer, Map?> {
|
class CustomerEncoder extends Converter<Customer, Map> {
|
||||||
const CustomerEncoder();
|
const CustomerEncoder();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map? convert(Customer model) => CustomerSerializer.toMap(model);
|
Map convert(Customer model) => CustomerSerializer.toMap(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
class CustomerDecoder extends Converter<Map, Customer> {
|
class CustomerDecoder extends Converter<Map, Customer> {
|
||||||
|
@ -558,7 +561,7 @@ class CustomerDecoder extends Converter<Map, Customer> {
|
||||||
Customer convert(Map map) => CustomerSerializer.fromMap(map);
|
Customer convert(Map map) => CustomerSerializer.fromMap(map);
|
||||||
}
|
}
|
||||||
|
|
||||||
class CustomerSerializer extends Codec<Customer, Map?> {
|
class CustomerSerializer extends Codec<Customer, Map> {
|
||||||
const CustomerSerializer();
|
const CustomerSerializer();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -570,19 +573,19 @@ class CustomerSerializer extends Codec<Customer, Map?> {
|
||||||
id: map['id'] as String?,
|
id: map['id'] as String?,
|
||||||
createdAt: map['created_at'] != null
|
createdAt: map['created_at'] != null
|
||||||
? (map['created_at'] is DateTime
|
? (map['created_at'] is DateTime
|
||||||
? (map['created_at'] as DateTime?)
|
? (map['created_at'] as DateTime)
|
||||||
: DateTime.parse(map['created_at'].toString()))
|
: DateTime.parse(map['created_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
updatedAt: map['updated_at'] != null
|
updatedAt: map['updated_at'] != null
|
||||||
? (map['updated_at'] is DateTime
|
? (map['updated_at'] is DateTime
|
||||||
? (map['updated_at'] as DateTime?)
|
? (map['updated_at'] as DateTime)
|
||||||
: DateTime.parse(map['updated_at'].toString()))
|
: DateTime.parse(map['updated_at'].toString()))
|
||||||
: null);
|
: null);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic>? toMap(_Customer? model) {
|
static Map<String, dynamic> toMap(_Customer? model) {
|
||||||
if (model == null) {
|
if (model == null) {
|
||||||
return null;
|
return {};
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
|
|
|
@ -1,10 +1,8 @@
 library angel3_orm_generator.test.models.tree;

 import 'package:angel3_migration/angel3_migration.dart';
-import 'package:angel3_model/angel3_model.dart';
 import 'package:angel3_orm/angel3_orm.dart';
 import 'package:angel3_serialize/angel3_serialize.dart';
-import 'package:collection/collection.dart';
 import 'package:optional/optional.dart';

 part 'tree.g.dart';

@ -16,7 +14,7 @@ class _Tree extends Model {
   int? rings;

   @hasMany
-  List<_Fruit>? fruits;
+  List<_Fruit> fruits = [];
 }

 @serializable
|
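With `@hasMany` relations now typed as non-nullable lists that default to `[]`, consuming code no longer needs null-aware access. A tiny stand-in illustration (the real classes are the generated `Tree`/`Fruit` models in this package):

// Stand-in classes, only to show the calling pattern.
class Fruit {
  Fruit(this.commonName);
  final String commonName;
}

class Tree {
  // Defaults to an empty list instead of null, like the generated model.
  List<Fruit> fruits = [];
}

void main() {
  var tree = Tree();
  print(tree.fruits.length); // 0 -- no `?.` or `?? []` needed any more
  tree.fruits.add(Fruit('apple'));
  for (var f in tree.fruits) {
    print(f.commonName);
  }
}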
@ -13,7 +13,7 @@ class TreeMigration extends Migration {
      table.serial('id').primaryKey();
      table.timeStamp('created_at');
      table.timeStamp('updated_at');
-     table.declare('rings', ColumnType('smallint'));
+     table.integer('rings').unique();
    });
  }

@ -31,7 +31,7 @@ class FruitMigration extends Migration {
      table.timeStamp('created_at');
      table.timeStamp('updated_at');
      table.integer('tree_id');
-     table.varChar('common_name');
+     table.varChar('common_name', length: 255);
    });
  }
|
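Migrations in this commit now pass an explicit `length: 255` to `varChar`. A hedged sketch of a complete migration written against this API; the `down()` method is added here for symmetry and is not part of the hunk above:

import 'package:angel3_migration/angel3_migration.dart';

class FruitMigration extends Migration {
  @override
  void up(Schema schema) {
    schema.create('fruits', (table) {
      table.serial('id').primaryKey();
      table.integer('tree_id');
      // Explicit length keeps the column a predictable VARCHAR(255)
      // on every supported database.
      table.varChar('common_name', length: 255);
    });
  }

  @override
  void down(Schema schema) => schema.drop('fruits');
}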
@ -67,7 +67,7 @@ class TreeQuery extends Query<Tree, TreeQueryWhere> {
|
||||||
|
|
||||||
TreeQueryWhere? _where;
|
TreeQueryWhere? _where;
|
||||||
|
|
||||||
FruitQuery? _fruits;
|
late FruitQuery _fruits;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map<String, String> get casts {
|
Map<String, String> get casts {
|
||||||
|
@ -117,7 +117,7 @@ class TreeQuery extends Query<Tree, TreeQueryWhere> {
|
||||||
return parseRow(row);
|
return parseRow(row);
|
||||||
}
|
}
|
||||||
|
|
||||||
FruitQuery? get fruits {
|
FruitQuery get fruits {
|
||||||
return _fruits;
|
return _fruits;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -382,20 +382,20 @@ class Tree extends _Tree {
|
||||||
int? rings;
|
int? rings;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
List<_Fruit>? fruits;
|
List<_Fruit> fruits;
|
||||||
|
|
||||||
Tree copyWith(
|
Tree copyWith(
|
||||||
{String? id,
|
{String? id,
|
||||||
DateTime? createdAt,
|
DateTime? createdAt,
|
||||||
DateTime? updatedAt,
|
DateTime? updatedAt,
|
||||||
int? rings,
|
int? rings,
|
||||||
List<_Fruit> fruits = const []}) {
|
List<_Fruit>? fruits}) {
|
||||||
return Tree(
|
return Tree(
|
||||||
id: id ?? this.id,
|
id: id ?? this.id,
|
||||||
createdAt: createdAt ?? this.createdAt,
|
createdAt: createdAt ?? this.createdAt,
|
||||||
updatedAt: updatedAt ?? this.updatedAt,
|
updatedAt: updatedAt ?? this.updatedAt,
|
||||||
rings: rings ?? this.rings,
|
rings: rings ?? this.rings,
|
||||||
fruits: fruits);
|
fruits: fruits ?? this.fruits);
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -518,12 +518,12 @@ class TreeSerializer extends Codec<Tree, Map> {
|
||||||
id: map['id'] as String?,
|
id: map['id'] as String?,
|
||||||
createdAt: map['created_at'] != null
|
createdAt: map['created_at'] != null
|
||||||
? (map['created_at'] is DateTime
|
? (map['created_at'] is DateTime
|
||||||
? (map['created_at'] as DateTime?)
|
? (map['created_at'] as DateTime)
|
||||||
: DateTime.parse(map['created_at'].toString()))
|
: DateTime.parse(map['created_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
updatedAt: map['updated_at'] != null
|
updatedAt: map['updated_at'] != null
|
||||||
? (map['updated_at'] is DateTime
|
? (map['updated_at'] is DateTime
|
||||||
? (map['updated_at'] as DateTime?)
|
? (map['updated_at'] as DateTime)
|
||||||
: DateTime.parse(map['updated_at'].toString()))
|
: DateTime.parse(map['updated_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
rings: map['rings'] as int?,
|
rings: map['rings'] as int?,
|
||||||
|
@ -533,13 +533,16 @@ class TreeSerializer extends Codec<Tree, Map> {
|
||||||
: []);
|
: []);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic> toMap(_Tree model) {
|
static Map<String, dynamic> toMap(_Tree? model) {
|
||||||
|
if (model == null) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
'created_at': model.createdAt?.toIso8601String(),
|
'created_at': model.createdAt?.toIso8601String(),
|
||||||
'updated_at': model.updatedAt?.toIso8601String(),
|
'updated_at': model.updatedAt?.toIso8601String(),
|
||||||
'rings': model.rings,
|
'rings': model.rings,
|
||||||
'fruits': model.fruits?.map((m) => FruitSerializer.toMap(m)).toList()
|
'fruits': model.fruits.map((m) => FruitSerializer.toMap(m)).toList()
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -592,19 +595,22 @@ class FruitSerializer extends Codec<Fruit, Map> {
|
||||||
id: map['id'] as String?,
|
id: map['id'] as String?,
|
||||||
createdAt: map['created_at'] != null
|
createdAt: map['created_at'] != null
|
||||||
? (map['created_at'] is DateTime
|
? (map['created_at'] is DateTime
|
||||||
? (map['created_at'] as DateTime?)
|
? (map['created_at'] as DateTime)
|
||||||
: DateTime.parse(map['created_at'].toString()))
|
: DateTime.parse(map['created_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
updatedAt: map['updated_at'] != null
|
updatedAt: map['updated_at'] != null
|
||||||
? (map['updated_at'] is DateTime
|
? (map['updated_at'] is DateTime
|
||||||
? (map['updated_at'] as DateTime?)
|
? (map['updated_at'] as DateTime)
|
||||||
: DateTime.parse(map['updated_at'].toString()))
|
: DateTime.parse(map['updated_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
treeId: map['tree_id'] as int?,
|
treeId: map['tree_id'] as int?,
|
||||||
commonName: map['common_name'] as String?);
|
commonName: map['common_name'] as String?);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic> toMap(_Fruit model) {
|
static Map<String, dynamic> toMap(_Fruit? model) {
|
||||||
|
if (model == null) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
'created_at': model.createdAt?.toIso8601String(),
|
'created_at': model.createdAt?.toIso8601String(),
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
import 'package:angel3_migration/angel3_migration.dart';
|
import 'package:angel3_migration/angel3_migration.dart';
|
||||||
import 'package:angel3_model/angel3_model.dart';
|
|
||||||
import 'package:angel3_orm/angel3_orm.dart';
|
import 'package:angel3_orm/angel3_orm.dart';
|
||||||
import 'package:angel3_serialize/angel3_serialize.dart';
|
import 'package:angel3_serialize/angel3_serialize.dart';
|
||||||
import 'package:optional/optional.dart';
|
import 'package:optional/optional.dart';
|
||||||
|
@ -26,10 +25,10 @@ abstract class _WeirdJoin {
|
||||||
_Song? get song;
|
_Song? get song;
|
||||||
|
|
||||||
@HasMany(foreignKey: 'parent')
|
@HasMany(foreignKey: 'parent')
|
||||||
List<_Numba?>? get numbas;
|
List<_Numba> get numbas;
|
||||||
|
|
||||||
@ManyToMany(_FooPivot)
|
@ManyToMany(_FooPivot)
|
||||||
List<_Foo?>? get foos;
|
List<_Foo> get foos;
|
||||||
}
|
}
|
||||||
|
|
||||||
@serializable
|
@serializable
|
||||||
|
@ -59,7 +58,7 @@ abstract class _Foo {
|
||||||
String? get bar;
|
String? get bar;
|
||||||
|
|
||||||
@ManyToMany(_FooPivot)
|
@ManyToMany(_FooPivot)
|
||||||
List<_WeirdJoin?>? get weirdJoins;
|
List<_WeirdJoin> get weirdJoins;
|
||||||
}
|
}
|
||||||
|
|
||||||
@serializable
|
@serializable
|
||||||
|
|
|
@ -10,7 +10,7 @@ class UnorthodoxMigration extends Migration {
|
||||||
@override
|
@override
|
||||||
void up(Schema schema) {
|
void up(Schema schema) {
|
||||||
schema.create('unorthodoxes', (table) {
|
schema.create('unorthodoxes', (table) {
|
||||||
table.varChar('name');
|
table.varChar('name', length: 255).primaryKey();
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -45,7 +45,7 @@ class SongMigration extends Migration {
|
||||||
table.timeStamp('created_at');
|
table.timeStamp('created_at');
|
||||||
table.timeStamp('updated_at');
|
table.timeStamp('updated_at');
|
||||||
table.integer('weird_join_id');
|
table.integer('weird_join_id');
|
||||||
table.varChar('title');
|
table.varChar('title', length: 255);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -74,7 +74,7 @@ class FooMigration extends Migration {
|
||||||
@override
|
@override
|
||||||
void up(Schema schema) {
|
void up(Schema schema) {
|
||||||
schema.create('foos', (table) {
|
schema.create('foos', (table) {
|
||||||
table.varChar('bar').primaryKey();
|
table.varChar('bar', length: 255).primaryKey();
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -223,11 +223,11 @@ class WeirdJoinQuery extends Query<WeirdJoin, WeirdJoinQueryWhere> {
|
||||||
|
|
||||||
WeirdJoinQueryWhere? _where;
|
WeirdJoinQueryWhere? _where;
|
||||||
|
|
||||||
UnorthodoxQuery? _unorthodox;
|
late UnorthodoxQuery _unorthodox;
|
||||||
|
|
||||||
SongQuery? _song;
|
late SongQuery _song;
|
||||||
|
|
||||||
NumbaQuery? _numbas;
|
late NumbaQuery _numbas;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map<String, String> get casts {
|
Map<String, String> get casts {
|
||||||
|
@ -291,22 +291,22 @@ class WeirdJoinQuery extends Query<WeirdJoin, WeirdJoinQueryWhere> {
|
||||||
return parseRow(row);
|
return parseRow(row);
|
||||||
}
|
}
|
||||||
|
|
||||||
UnorthodoxQuery? get unorthodox {
|
UnorthodoxQuery get unorthodox {
|
||||||
return _unorthodox;
|
return _unorthodox;
|
||||||
}
|
}
|
||||||
|
|
||||||
SongQuery? get song {
|
SongQuery get song {
|
||||||
return _song;
|
return _song;
|
||||||
}
|
}
|
||||||
|
|
||||||
NumbaQuery? get numbas {
|
NumbaQuery get numbas {
|
||||||
return _numbas;
|
return _numbas;
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
bool canCompile(trampoline) {
|
bool canCompile(trampoline) {
|
||||||
return (!(trampoline.contains('weird_joins') == true &&
|
return (!(trampoline.contains('weird_joins') &&
|
||||||
trampoline.contains('foo_pivots') == true));
|
trampoline.contains('foo_pivots')));
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
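The `canCompile` overrides in this file drop the redundant `== true` comparisons, since with null safety `trampoline.contains(...)` is already a plain `bool`. The guard itself is what stops the self-referencing many-to-many join (weird_joins through foo_pivots) from being compiled over and over. An illustrative stand-alone version of the check:

// Illustrative only: in the generated code the trampoline is threaded
// through the Query classes; here it is just the set of table names
// already on the join stack.
bool canCompile(Set<String> trampoline) {
  return !(trampoline.contains('weird_joins') &&
      trampoline.contains('foo_pivots'));
}

void main() {
  print(canCompile({'weird_joins'})); // true
  print(canCompile({'weird_joins', 'foo_pivots'})); // false
}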
@ -322,8 +322,8 @@ class WeirdJoinQuery extends Query<WeirdJoin, WeirdJoinQueryWhere> {
|
||||||
return out
|
return out
|
||||||
..[idx] = l.copyWith(
|
..[idx] = l.copyWith(
|
||||||
numbas: List<_Numba>.from(l.numbas ?? [])
|
numbas: List<_Numba>.from(l.numbas ?? [])
|
||||||
..addAll(List<_Numba>.from(model.numbas ?? [])),
|
..addAll(model.numbas ?? []),
|
||||||
foos: List<_Foo?>.from(l.foos ?? [])..addAll(model.foos ?? []));
|
foos: List<_Foo>.from(l.foos ?? [])..addAll(model.foos ?? []));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -341,9 +341,9 @@ class WeirdJoinQuery extends Query<WeirdJoin, WeirdJoinQueryWhere> {
|
||||||
var l = out[idx];
|
var l = out[idx];
|
||||||
return out
|
return out
|
||||||
..[idx] = l.copyWith(
|
..[idx] = l.copyWith(
|
||||||
numbas: List<_Numba?>.from(l.numbas ?? [])
|
numbas: List<_Numba>.from(l.numbas ?? [])
|
||||||
..addAll(model.numbas ?? []),
|
..addAll(model.numbas ?? []),
|
||||||
foos: List<_Foo?>.from(l.foos ?? [])..addAll(model.foos ?? []));
|
foos: List<_Foo>.from(l.foos ?? [])..addAll(model.foos ?? []));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -361,9 +361,9 @@ class WeirdJoinQuery extends Query<WeirdJoin, WeirdJoinQueryWhere> {
|
||||||
var l = out[idx];
|
var l = out[idx];
|
||||||
return out
|
return out
|
||||||
..[idx] = l.copyWith(
|
..[idx] = l.copyWith(
|
||||||
numbas: List<_Numba?>.from(l.numbas ?? [])
|
numbas: List<_Numba>.from(l.numbas ?? [])
|
||||||
..addAll(model.numbas ?? []),
|
..addAll(model.numbas ?? []),
|
||||||
foos: List<_Foo?>.from(l.foos ?? [])..addAll(model.foos ?? []));
|
foos: List<_Foo>.from(l.foos ?? [])..addAll(model.foos ?? []));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -404,7 +404,7 @@ class WeirdJoinQueryValues extends MapQueryValues {
|
||||||
void copyFrom(WeirdJoin model) {
|
void copyFrom(WeirdJoin model) {
|
||||||
id = model.id;
|
id = model.id;
|
||||||
if (model.unorthodox != null) {
|
if (model.unorthodox != null) {
|
||||||
values['join_name'] = model.unorthodox!.name;
|
values['join_name'] = model.unorthodox?.name;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -670,7 +670,7 @@ class FooQuery extends Query<Foo, FooQueryWhere> {
|
||||||
model = model.copyWith(weirdJoins: [m]);
|
model = model.copyWith(weirdJoins: [m]);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
return Optional.ofNullable(model);
|
return Optional.of(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -680,8 +680,8 @@ class FooQuery extends Query<Foo, FooQueryWhere> {
|
||||||
|
|
||||||
@override
|
@override
|
||||||
bool canCompile(trampoline) {
|
bool canCompile(trampoline) {
|
||||||
return (!(trampoline.contains('foos') == true &&
|
return (!(trampoline.contains('foos') &&
|
||||||
trampoline.contains('foo_pivots') == true));
|
trampoline.contains('foo_pivots')));
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -789,9 +789,9 @@ class FooPivotQuery extends Query<FooPivot, FooPivotQueryWhere> {
|
||||||
|
|
||||||
FooPivotQueryWhere? _where;
|
FooPivotQueryWhere? _where;
|
||||||
|
|
||||||
WeirdJoinQuery? _weirdJoin;
|
late WeirdJoinQuery _weirdJoin;
|
||||||
|
|
||||||
FooQuery? _foo;
|
late FooQuery _foo;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map<String, String> get casts {
|
Map<String, String> get casts {
|
||||||
|
@ -835,7 +835,7 @@ class FooPivotQuery extends Query<FooPivot, FooPivotQueryWhere> {
|
||||||
model = model.copyWith(foo: m);
|
model = model.copyWith(foo: m);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
return Optional.ofNullable(model);
|
return Optional.of(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -843,11 +843,11 @@ class FooPivotQuery extends Query<FooPivot, FooPivotQueryWhere> {
|
||||||
return parseRow(row);
|
return parseRow(row);
|
||||||
}
|
}
|
||||||
|
|
||||||
WeirdJoinQuery? get weirdJoin {
|
WeirdJoinQuery get weirdJoin {
|
||||||
return _weirdJoin;
|
return _weirdJoin;
|
||||||
}
|
}
|
||||||
|
|
||||||
FooQuery? get foo {
|
FooQuery get foo {
|
||||||
return _foo;
|
return _foo;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -885,10 +885,10 @@ class FooPivotQueryValues extends MapQueryValues {
|
||||||
set fooBar(String? value) => values['foo_bar'] = value;
|
set fooBar(String? value) => values['foo_bar'] = value;
|
||||||
void copyFrom(FooPivot model) {
|
void copyFrom(FooPivot model) {
|
||||||
if (model.weirdJoin != null) {
|
if (model.weirdJoin != null) {
|
||||||
values['weird_join_id'] = model.weirdJoin!.id;
|
values['weird_join_id'] = model.weirdJoin?.id;
|
||||||
}
|
}
|
||||||
if (model.foo != null) {
|
if (model.foo != null) {
|
||||||
values['foo_bar'] = model.foo!.bar;
|
values['foo_bar'] = model.foo?.bar;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -899,10 +899,10 @@ class FooPivotQueryValues extends MapQueryValues {
|
||||||
|
|
||||||
@generatedSerializable
|
@generatedSerializable
|
||||||
class Unorthodox implements _Unorthodox {
|
class Unorthodox implements _Unorthodox {
|
||||||
const Unorthodox({this.name});
|
Unorthodox({this.name});
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final String? name;
|
String? name;
|
||||||
|
|
||||||
Unorthodox copyWith({String? name}) {
|
Unorthodox copyWith({String? name}) {
|
||||||
return Unorthodox(name: name ?? this.name);
|
return Unorthodox(name: name ?? this.name);
|
||||||
|
@ -923,37 +923,41 @@ class Unorthodox implements _Unorthodox {
|
||||||
return 'Unorthodox(name=$name)';
|
return 'Unorthodox(name=$name)';
|
||||||
}
|
}
|
||||||
|
|
||||||
Map<String, dynamic>? toJson() {
|
Map<String, dynamic> toJson() {
|
||||||
return UnorthodoxSerializer.toMap(this);
|
return UnorthodoxSerializer.toMap(this);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@generatedSerializable
|
@generatedSerializable
|
||||||
class WeirdJoin implements _WeirdJoin {
|
class WeirdJoin implements _WeirdJoin {
|
||||||
const WeirdJoin(
|
WeirdJoin(
|
||||||
{this.id, this.unorthodox, this.song, this.numbas, this.foos});
|
{this.id,
|
||||||
|
this.unorthodox,
|
||||||
|
this.song,
|
||||||
|
this.numbas = const [],
|
||||||
|
this.foos = const []});
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final int? id;
|
int? id;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final _Unorthodox? unorthodox;
|
_Unorthodox? unorthodox;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final _Song? song;
|
_Song? song;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final List<_Numba?>? numbas;
|
List<_Numba> numbas;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final List<_Foo?>? foos;
|
List<_Foo> foos;
|
||||||
|
|
||||||
WeirdJoin copyWith(
|
WeirdJoin copyWith(
|
||||||
{int? id,
|
{int? id,
|
||||||
_Unorthodox? unorthodox,
|
_Unorthodox? unorthodox,
|
||||||
_Song? song,
|
_Song? song,
|
||||||
List<_Numba?>? numbas,
|
List<_Numba>? numbas,
|
||||||
List<_Foo?>? foos}) {
|
List<_Foo>? foos}) {
|
||||||
return WeirdJoin(
|
return WeirdJoin(
|
||||||
id: id ?? this.id,
|
id: id ?? this.id,
|
||||||
unorthodox: unorthodox ?? this.unorthodox,
|
unorthodox: unorthodox ?? this.unorthodox,
|
||||||
|
@ -968,9 +972,9 @@ class WeirdJoin implements _WeirdJoin {
|
||||||
other.id == id &&
|
other.id == id &&
|
||||||
other.unorthodox == unorthodox &&
|
other.unorthodox == unorthodox &&
|
||||||
other.song == song &&
|
other.song == song &&
|
||||||
ListEquality<_Numba?>(DefaultEquality<_Numba>())
|
ListEquality<_Numba>(DefaultEquality<_Numba>())
|
||||||
.equals(other.numbas, numbas) &&
|
.equals(other.numbas, numbas) &&
|
||||||
ListEquality<_Foo?>(DefaultEquality<_Foo>()).equals(other.foos, foos);
|
ListEquality<_Foo>(DefaultEquality<_Foo>()).equals(other.foos, foos);
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -983,7 +987,7 @@ class WeirdJoin implements _WeirdJoin {
|
||||||
return 'WeirdJoin(id=$id, unorthodox=$unorthodox, song=$song, numbas=$numbas, foos=$foos)';
|
return 'WeirdJoin(id=$id, unorthodox=$unorthodox, song=$song, numbas=$numbas, foos=$foos)';
|
||||||
}
|
}
|
||||||
|
|
||||||
Map<String, dynamic>? toJson() {
|
Map<String, dynamic> toJson() {
|
||||||
return WeirdJoinSerializer.toMap(this);
|
return WeirdJoinSerializer.toMap(this);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1005,10 +1009,10 @@ class Song extends _Song {
|
||||||
DateTime? updatedAt;
|
DateTime? updatedAt;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final int? weirdJoinId;
|
int? weirdJoinId;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final String? title;
|
String? title;
|
||||||
|
|
||||||
Song copyWith(
|
Song copyWith(
|
||||||
{String? id,
|
{String? id,
|
||||||
|
@ -1044,7 +1048,7 @@ class Song extends _Song {
|
||||||
return 'Song(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, weirdJoinId=$weirdJoinId, title=$title)';
|
return 'Song(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, weirdJoinId=$weirdJoinId, title=$title)';
|
||||||
}
|
}
|
||||||
|
|
||||||
Map<String, dynamic>? toJson() {
|
Map<String, dynamic> toJson() {
|
||||||
return SongSerializer.toMap(this);
|
return SongSerializer.toMap(this);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1078,20 +1082,20 @@ class Numba extends _Numba {
|
||||||
return 'Numba(i=$i, parent=$parent)';
|
return 'Numba(i=$i, parent=$parent)';
|
||||||
}
|
}
|
||||||
|
|
||||||
Map<String, dynamic>? toJson() {
|
Map<String, dynamic> toJson() {
|
||||||
return NumbaSerializer.toMap(this);
|
return NumbaSerializer.toMap(this);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@generatedSerializable
|
@generatedSerializable
|
||||||
class Foo implements _Foo {
|
class Foo implements _Foo {
|
||||||
const Foo({this.bar, this.weirdJoins});
|
Foo({this.bar, this.weirdJoins = const []});
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final String? bar;
|
String? bar;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final List<_WeirdJoin>? weirdJoins;
|
List<_WeirdJoin> weirdJoins;
|
||||||
|
|
||||||
Foo copyWith({String? bar, List<_WeirdJoin>? weirdJoins}) {
|
Foo copyWith({String? bar, List<_WeirdJoin>? weirdJoins}) {
|
||||||
return Foo(bar: bar ?? this.bar, weirdJoins: weirdJoins ?? this.weirdJoins);
|
return Foo(bar: bar ?? this.bar, weirdJoins: weirdJoins ?? this.weirdJoins);
|
||||||
|
@ -1101,7 +1105,7 @@ class Foo implements _Foo {
|
||||||
bool operator ==(other) {
|
bool operator ==(other) {
|
||||||
return other is _Foo &&
|
return other is _Foo &&
|
||||||
other.bar == bar &&
|
other.bar == bar &&
|
||||||
ListEquality<_WeirdJoin?>(DefaultEquality<_WeirdJoin>())
|
ListEquality<_WeirdJoin>(DefaultEquality<_WeirdJoin>())
|
||||||
.equals(other.weirdJoins, weirdJoins);
|
.equals(other.weirdJoins, weirdJoins);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1115,20 +1119,20 @@ class Foo implements _Foo {
|
||||||
return 'Foo(bar=$bar, weirdJoins=$weirdJoins)';
|
return 'Foo(bar=$bar, weirdJoins=$weirdJoins)';
|
||||||
}
|
}
|
||||||
|
|
||||||
Map<String, dynamic>? toJson() {
|
Map<String, dynamic> toJson() {
|
||||||
return FooSerializer.toMap(this);
|
return FooSerializer.toMap(this);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@generatedSerializable
|
@generatedSerializable
|
||||||
class FooPivot implements _FooPivot {
|
class FooPivot implements _FooPivot {
|
||||||
const FooPivot({this.weirdJoin, this.foo});
|
FooPivot({this.weirdJoin, this.foo});
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final _WeirdJoin? weirdJoin;
|
_WeirdJoin? weirdJoin;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final _Foo? foo;
|
_Foo? foo;
|
||||||
|
|
||||||
FooPivot copyWith({_WeirdJoin? weirdJoin, _Foo? foo}) {
|
FooPivot copyWith({_WeirdJoin? weirdJoin, _Foo? foo}) {
|
||||||
return FooPivot(
|
return FooPivot(
|
||||||
|
@ -1163,11 +1167,11 @@ class FooPivot implements _FooPivot {
|
||||||
|
|
||||||
const UnorthodoxSerializer unorthodoxSerializer = UnorthodoxSerializer();
|
const UnorthodoxSerializer unorthodoxSerializer = UnorthodoxSerializer();
|
||||||
|
|
||||||
class UnorthodoxEncoder extends Converter<Unorthodox, Map?> {
|
class UnorthodoxEncoder extends Converter<Unorthodox, Map> {
|
||||||
const UnorthodoxEncoder();
|
const UnorthodoxEncoder();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map? convert(Unorthodox model) => UnorthodoxSerializer.toMap(model);
|
Map convert(Unorthodox model) => UnorthodoxSerializer.toMap(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
class UnorthodoxDecoder extends Converter<Map, Unorthodox> {
|
class UnorthodoxDecoder extends Converter<Map, Unorthodox> {
|
||||||
|
@ -1177,7 +1181,7 @@ class UnorthodoxDecoder extends Converter<Map, Unorthodox> {
|
||||||
Unorthodox convert(Map map) => UnorthodoxSerializer.fromMap(map);
|
Unorthodox convert(Map map) => UnorthodoxSerializer.fromMap(map);
|
||||||
}
|
}
|
||||||
|
|
||||||
class UnorthodoxSerializer extends Codec<Unorthodox, Map?> {
|
class UnorthodoxSerializer extends Codec<Unorthodox, Map> {
|
||||||
const UnorthodoxSerializer();
|
const UnorthodoxSerializer();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -1188,9 +1192,9 @@ class UnorthodoxSerializer extends Codec<Unorthodox, Map?> {
|
||||||
return Unorthodox(name: map['name'] as String?);
|
return Unorthodox(name: map['name'] as String?);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic>? toMap(_Unorthodox? model) {
|
static Map<String, dynamic> toMap(_Unorthodox? model) {
|
||||||
if (model == null) {
|
if (model == null) {
|
||||||
return null;
|
return {};
|
||||||
}
|
}
|
||||||
return {'name': model.name};
|
return {'name': model.name};
|
||||||
}
|
}
|
||||||
|
@ -1204,11 +1208,11 @@ abstract class UnorthodoxFields {
|
||||||
|
|
||||||
const WeirdJoinSerializer weirdJoinSerializer = WeirdJoinSerializer();
|
const WeirdJoinSerializer weirdJoinSerializer = WeirdJoinSerializer();
|
||||||
|
|
||||||
class WeirdJoinEncoder extends Converter<WeirdJoin, Map?> {
|
class WeirdJoinEncoder extends Converter<WeirdJoin, Map> {
|
||||||
const WeirdJoinEncoder();
|
const WeirdJoinEncoder();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map? convert(WeirdJoin model) => WeirdJoinSerializer.toMap(model);
|
Map convert(WeirdJoin model) => WeirdJoinSerializer.toMap(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
class WeirdJoinDecoder extends Converter<Map, WeirdJoin> {
|
class WeirdJoinDecoder extends Converter<Map, WeirdJoin> {
|
||||||
|
@ -1218,7 +1222,7 @@ class WeirdJoinDecoder extends Converter<Map, WeirdJoin> {
|
||||||
WeirdJoin convert(Map map) => WeirdJoinSerializer.fromMap(map);
|
WeirdJoin convert(Map map) => WeirdJoinSerializer.fromMap(map);
|
||||||
}
|
}
|
||||||
|
|
||||||
class WeirdJoinSerializer extends Codec<WeirdJoin, Map?> {
|
class WeirdJoinSerializer extends Codec<WeirdJoin, Map> {
|
||||||
const WeirdJoinSerializer();
|
const WeirdJoinSerializer();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -1237,23 +1241,23 @@ class WeirdJoinSerializer extends Codec<WeirdJoin, Map?> {
|
||||||
numbas: map['numbas'] is Iterable
|
numbas: map['numbas'] is Iterable
|
||||||
? List.unmodifiable(((map['numbas'] as Iterable).whereType<Map>())
|
? List.unmodifiable(((map['numbas'] as Iterable).whereType<Map>())
|
||||||
.map(NumbaSerializer.fromMap))
|
.map(NumbaSerializer.fromMap))
|
||||||
: null,
|
: [],
|
||||||
foos: map['foos'] is Iterable
|
foos: map['foos'] is Iterable
|
||||||
? List.unmodifiable(((map['foos'] as Iterable).whereType<Map>())
|
? List.unmodifiable(((map['foos'] as Iterable).whereType<Map>())
|
||||||
.map(FooSerializer.fromMap))
|
.map(FooSerializer.fromMap))
|
||||||
: null);
|
: []);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic>? toMap(_WeirdJoin? model) {
|
static Map<String, dynamic> toMap(_WeirdJoin? model) {
|
||||||
if (model == null) {
|
if (model == null) {
|
||||||
return null;
|
return {};
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
'unorthodox': UnorthodoxSerializer.toMap(model.unorthodox),
|
'unorthodox': UnorthodoxSerializer.toMap(model.unorthodox),
|
||||||
'song': SongSerializer.toMap(model.song),
|
'song': SongSerializer.toMap(model.song),
|
||||||
'numbas': model.numbas?.map((m) => NumbaSerializer.toMap(m)).toList(),
|
'numbas': model.numbas.map((m) => NumbaSerializer.toMap(m)).toList(),
|
||||||
'foos': model.foos?.map((m) => FooSerializer.toMap(m)).toList()
|
'foos': model.foos.map((m) => FooSerializer.toMap(m)).toList()
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1280,11 +1284,11 @@ abstract class WeirdJoinFields {
|
||||||
|
|
||||||
const SongSerializer songSerializer = SongSerializer();
|
const SongSerializer songSerializer = SongSerializer();
|
||||||
|
|
||||||
class SongEncoder extends Converter<Song, Map?> {
|
class SongEncoder extends Converter<Song, Map> {
|
||||||
const SongEncoder();
|
const SongEncoder();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map? convert(Song model) => SongSerializer.toMap(model);
|
Map convert(Song model) => SongSerializer.toMap(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
class SongDecoder extends Converter<Map, Song> {
|
class SongDecoder extends Converter<Map, Song> {
|
||||||
|
@ -1294,7 +1298,7 @@ class SongDecoder extends Converter<Map, Song> {
|
||||||
Song convert(Map map) => SongSerializer.fromMap(map);
|
Song convert(Map map) => SongSerializer.fromMap(map);
|
||||||
}
|
}
|
||||||
|
|
||||||
class SongSerializer extends Codec<Song, Map?> {
|
class SongSerializer extends Codec<Song, Map> {
|
||||||
const SongSerializer();
|
const SongSerializer();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -1306,21 +1310,21 @@ class SongSerializer extends Codec<Song, Map?> {
|
||||||
id: map['id'] as String?,
|
id: map['id'] as String?,
|
||||||
createdAt: map['created_at'] != null
|
createdAt: map['created_at'] != null
|
||||||
? (map['created_at'] is DateTime
|
? (map['created_at'] is DateTime
|
||||||
? (map['created_at'] as DateTime?)
|
? (map['created_at'] as DateTime)
|
||||||
: DateTime.parse(map['created_at'].toString()))
|
: DateTime.parse(map['created_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
updatedAt: map['updated_at'] != null
|
updatedAt: map['updated_at'] != null
|
||||||
? (map['updated_at'] is DateTime
|
? (map['updated_at'] is DateTime
|
||||||
? (map['updated_at'] as DateTime?)
|
? (map['updated_at'] as DateTime)
|
||||||
: DateTime.parse(map['updated_at'].toString()))
|
: DateTime.parse(map['updated_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
weirdJoinId: map['weird_join_id'] as int?,
|
weirdJoinId: map['weird_join_id'] as int?,
|
||||||
title: map['title'] as String?);
|
title: map['title'] as String?);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic>? toMap(_Song? model) {
|
static Map<String, dynamic> toMap(_Song? model) {
|
||||||
if (model == null) {
|
if (model == null) {
|
||||||
return null;
|
return {};
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
|
@ -1354,11 +1358,11 @@ abstract class SongFields {
|
||||||
|
|
||||||
const NumbaSerializer numbaSerializer = NumbaSerializer();
|
const NumbaSerializer numbaSerializer = NumbaSerializer();
|
||||||
|
|
||||||
class NumbaEncoder extends Converter<Numba, Map?> {
|
class NumbaEncoder extends Converter<Numba, Map> {
|
||||||
const NumbaEncoder();
|
const NumbaEncoder();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map? convert(Numba model) => NumbaSerializer.toMap(model);
|
Map convert(Numba model) => NumbaSerializer.toMap(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
class NumbaDecoder extends Converter<Map, Numba> {
|
class NumbaDecoder extends Converter<Map, Numba> {
|
||||||
|
@ -1368,7 +1372,7 @@ class NumbaDecoder extends Converter<Map, Numba> {
|
||||||
Numba convert(Map map) => NumbaSerializer.fromMap(map);
|
Numba convert(Map map) => NumbaSerializer.fromMap(map);
|
||||||
}
|
}
|
||||||
|
|
||||||
class NumbaSerializer extends Codec<Numba, Map?> {
|
class NumbaSerializer extends Codec<Numba, Map> {
|
||||||
const NumbaSerializer();
|
const NumbaSerializer();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -1379,9 +1383,9 @@ class NumbaSerializer extends Codec<Numba, Map?> {
|
||||||
return Numba(i: map['i'] as int?, parent: map['parent'] as int?);
|
return Numba(i: map['i'] as int?, parent: map['parent'] as int?);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic>? toMap(_Numba? model) {
|
static Map<String, dynamic> toMap(_Numba? model) {
|
||||||
if (model == null) {
|
if (model == null) {
|
||||||
return null;
|
return {};
|
||||||
}
|
}
|
||||||
return {'i': model.i, 'parent': model.parent};
|
return {'i': model.i, 'parent': model.parent};
|
||||||
}
|
}
|
||||||
|
@ -1397,11 +1401,11 @@ abstract class NumbaFields {
|
||||||
|
|
||||||
const FooSerializer fooSerializer = FooSerializer();
|
const FooSerializer fooSerializer = FooSerializer();
|
||||||
|
|
||||||
class FooEncoder extends Converter<Foo, Map?> {
|
class FooEncoder extends Converter<Foo, Map> {
|
||||||
const FooEncoder();
|
const FooEncoder();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map? convert(Foo model) => FooSerializer.toMap(model);
|
Map convert(Foo model) => FooSerializer.toMap(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
class FooDecoder extends Converter<Map, Foo> {
|
class FooDecoder extends Converter<Map, Foo> {
|
||||||
|
@ -1411,7 +1415,7 @@ class FooDecoder extends Converter<Map, Foo> {
|
||||||
Foo convert(Map map) => FooSerializer.fromMap(map);
|
Foo convert(Map map) => FooSerializer.fromMap(map);
|
||||||
}
|
}
|
||||||
|
|
||||||
class FooSerializer extends Codec<Foo, Map?> {
|
class FooSerializer extends Codec<Foo, Map> {
|
||||||
const FooSerializer();
|
const FooSerializer();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -1425,17 +1429,17 @@ class FooSerializer extends Codec<Foo, Map?> {
|
||||||
? List.unmodifiable(
|
? List.unmodifiable(
|
||||||
((map['weird_joins'] as Iterable).whereType<Map>())
|
((map['weird_joins'] as Iterable).whereType<Map>())
|
||||||
.map(WeirdJoinSerializer.fromMap))
|
.map(WeirdJoinSerializer.fromMap))
|
||||||
: null);
|
: []);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic>? toMap(_Foo? model) {
|
static Map<String, dynamic> toMap(_Foo? model) {
|
||||||
if (model == null) {
|
if (model == null) {
|
||||||
return null;
|
return {};
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
'bar': model.bar,
|
'bar': model.bar,
|
||||||
'weird_joins':
|
'weird_joins':
|
||||||
model.weirdJoins?.map((m) => WeirdJoinSerializer.toMap(m)).toList()
|
model.weirdJoins.map((m) => WeirdJoinSerializer.toMap(m)).toList()
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1481,7 +1485,10 @@ class FooPivotSerializer extends Codec<FooPivot, Map> {
|
||||||
: null);
|
: null);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic> toMap(_FooPivot model) {
|
static Map<String, dynamic> toMap(_FooPivot? model) {
|
||||||
|
if (model == null) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
return {
|
return {
|
||||||
'weird_join': WeirdJoinSerializer.toMap(model.weirdJoin),
|
'weird_join': WeirdJoinSerializer.toMap(model.weirdJoin),
|
||||||
'foo': FooSerializer.toMap(model.foo)
|
'foo': FooSerializer.toMap(model.foo)
|
||||||
|
|
|
@ -1,10 +1,8 @@
|
||||||
library angel3_orm_generator.test.models.user;
|
library angel3_orm_generator.test.models.user;
|
||||||
|
|
||||||
import 'package:angel3_migration/angel3_migration.dart';
|
import 'package:angel3_migration/angel3_migration.dart';
|
||||||
import 'package:angel3_model/angel3_model.dart';
|
|
||||||
import 'package:angel3_orm/angel3_orm.dart';
|
import 'package:angel3_orm/angel3_orm.dart';
|
||||||
import 'package:angel3_serialize/angel3_serialize.dart';
|
import 'package:angel3_serialize/angel3_serialize.dart';
|
||||||
import 'package:collection/collection.dart';
|
|
||||||
import 'package:optional/optional.dart';
|
import 'package:optional/optional.dart';
|
||||||
|
|
||||||
part 'user.g.dart';
|
part 'user.g.dart';
|
||||||
|
|
|
@ -13,9 +13,9 @@ class UserMigration extends Migration {
|
||||||
table.serial('id').primaryKey();
|
table.serial('id').primaryKey();
|
||||||
table.timeStamp('created_at');
|
table.timeStamp('created_at');
|
||||||
table.timeStamp('updated_at');
|
table.timeStamp('updated_at');
|
||||||
table.varChar('username');
|
table.varChar('username', length: 255);
|
||||||
table.varChar('password');
|
table.varChar('password', length: 255);
|
||||||
table.varChar('email');
|
table.varChar('email', length: 255);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -29,8 +29,8 @@ class RoleUserMigration extends Migration {
  @override
  void up(Schema schema) {
    schema.create('role_users', (table) {
-     table.declare('role_id', ColumnType('serial')).references('roles', 'id');
-     table.declare('user_id', ColumnType('serial')).references('users', 'id');
+     table.declare('role_id', ColumnType('int')).references('roles', 'id');
+     table.declare('user_id', ColumnType('int')).references('users', 'id');
    });
  }
|
|
||||||
|
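In `RoleUserMigration` above, the pivot table's foreign-key columns switch from `serial` to `int`: a `serial` column would create its own auto-incrementing sequence, while these columns only need to hold values copied from `roles.id` and `users.id`. A hedged sketch of the full migration; `down()` is added for symmetry and is not shown in the hunk:

import 'package:angel3_migration/angel3_migration.dart';

class RoleUserMigration extends Migration {
  @override
  void up(Schema schema) {
    schema.create('role_users', (table) {
      // Plain integer columns referencing the serial primary keys of the
      // joined tables; they get no sequence of their own.
      table.declare('role_id', ColumnType('int')).references('roles', 'id');
      table.declare('user_id', ColumnType('int')).references('users', 'id');
    });
  }

  @override
  void down(Schema schema) => schema.drop('role_users');
}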
@ -47,7 +47,7 @@ class RoleMigration extends Migration {
|
||||||
table.serial('id').primaryKey();
|
table.serial('id').primaryKey();
|
||||||
table.timeStamp('created_at');
|
table.timeStamp('created_at');
|
||||||
table.timeStamp('updated_at');
|
table.timeStamp('updated_at');
|
||||||
table.varChar('name');
|
table.varChar('name', length: 255);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -123,9 +123,9 @@ class UserQuery extends Query<User, UserQueryWhere> {
|
||||||
password: (row[4] as String?),
|
password: (row[4] as String?),
|
||||||
email: (row[5] as String?));
|
email: (row[5] as String?));
|
||||||
if (row.length > 6) {
|
if (row.length > 6) {
|
||||||
var roleOpt = RoleQuery.parseRow(row.skip(6).take(4).toList());
|
var modelOpt = RoleQuery.parseRow(row.skip(6).take(4).toList());
|
||||||
roleOpt.ifPresent((role) {
|
modelOpt.ifPresent((m) {
|
||||||
model = model.copyWith(roles: [role]);
|
model = model.copyWith(roles: [m]);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
return Optional.of(model);
|
return Optional.of(model);
|
||||||
|
@ -138,8 +138,8 @@ class UserQuery extends Query<User, UserQueryWhere> {
|
||||||
|
|
||||||
@override
|
@override
|
||||||
bool canCompile(trampoline) {
|
bool canCompile(trampoline) {
|
||||||
return (!(trampoline.contains('users') == true &&
|
return (!(trampoline.contains('users') &&
|
||||||
trampoline.contains('role_users') == true));
|
trampoline.contains('role_users')));
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -154,7 +154,8 @@ class UserQuery extends Query<User, UserQueryWhere> {
        var l = out[idx];
        return out
          ..[idx] = l.copyWith(
-             roles: List<_Role>.from(l.roles)..addAll(model.roles));
+             roles: List<_Role>.from(l.roles ?? [])
+               ..addAll(model.roles ?? []));
      }
    });
  });
|
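The hunks above (and the two that follow) change how `UserQuery` folds joined rows: when the join against `role_users`/`roles` yields the same user several times, the duplicates collapse into one `User` whose `roles` list is the union, and the `?? []` guards make the fold safe when a relation list is absent. An illustrative stand-alone version of that fold over plain data:

// Illustrative only: fold duplicate (userId, roleName) join rows into one
// entry per user, the same shape of merge the generated copyWith(roles: ...)
// performs per model.
Map<int, List<String>> mergeJoinedRows(List<MapEntry<int, String>> rows) {
  var out = <int, List<String>>{};
  for (var row in rows) {
    out.putIfAbsent(row.key, () => []).add(row.value);
  }
  return out;
}

void main() {
  var rows = [MapEntry(1, 'admin'), MapEntry(1, 'editor'), MapEntry(2, 'admin')];
  print(mergeJoinedRows(rows)); // {1: [admin, editor], 2: [admin]}
}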
@ -172,7 +173,8 @@ class UserQuery extends Query<User, UserQueryWhere> {
|
||||||
var l = out[idx];
|
var l = out[idx];
|
||||||
return out
|
return out
|
||||||
..[idx] = l.copyWith(
|
..[idx] = l.copyWith(
|
||||||
roles: List<_Role>.from(l.roles)..addAll(model.roles));
|
roles: List<_Role>.from(l.roles ?? [])
|
||||||
|
..addAll(model.roles ?? []));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -190,7 +192,8 @@ class UserQuery extends Query<User, UserQueryWhere> {
|
||||||
var l = out[idx];
|
var l = out[idx];
|
||||||
return out
|
return out
|
||||||
..[idx] = l.copyWith(
|
..[idx] = l.copyWith(
|
||||||
roles: List<_Role>.from(l.roles)..addAll(model.roles));
|
roles: List<_Role>.from(l.roles ?? [])
|
||||||
|
..addAll(model.roles ?? []));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -297,9 +300,9 @@ class RoleUserQuery extends Query<RoleUser, RoleUserQueryWhere> {
|
||||||
|
|
||||||
RoleUserQueryWhere? _where;
|
RoleUserQueryWhere? _where;
|
||||||
|
|
||||||
RoleQuery? _role;
|
late RoleQuery _role;
|
||||||
|
|
||||||
UserQuery? _user;
|
late UserQuery _user;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map<String, String> get casts {
|
Map<String, String> get casts {
|
||||||
|
@ -332,15 +335,15 @@ class RoleUserQuery extends Query<RoleUser, RoleUserQueryWhere> {
|
||||||
}
|
}
|
||||||
var model = RoleUser();
|
var model = RoleUser();
|
||||||
if (row.length > 2) {
|
if (row.length > 2) {
|
||||||
var roleOpt = RoleQuery.parseRow(row.skip(2).take(4).toList());
|
var modelOpt = RoleQuery.parseRow(row.skip(2).take(4).toList());
|
||||||
roleOpt.ifPresent((role) {
|
modelOpt.ifPresent((m) {
|
||||||
model = model.copyWith(role: role);
|
model = model.copyWith(role: m);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
if (row.length > 6) {
|
if (row.length > 6) {
|
||||||
var userOpt = UserQuery.parseRow(row.skip(6).take(6).toList());
|
var modelOpt = UserQuery.parseRow(row.skip(6).take(6).toList());
|
||||||
userOpt.ifPresent((user) {
|
modelOpt.ifPresent((m) {
|
||||||
model = model.copyWith(user: user);
|
model = model.copyWith(user: m);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
return Optional.of(model);
|
return Optional.of(model);
|
||||||
|
@ -351,11 +354,11 @@ class RoleUserQuery extends Query<RoleUser, RoleUserQueryWhere> {
|
||||||
return parseRow(row);
|
return parseRow(row);
|
||||||
}
|
}
|
||||||
|
|
||||||
RoleQuery? get role {
|
RoleQuery get role {
|
||||||
return _role;
|
return _role;
|
||||||
}
|
}
|
||||||
|
|
||||||
UserQuery? get user {
|
UserQuery get user {
|
||||||
return _user;
|
return _user;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -381,22 +384,22 @@ class RoleUserQueryValues extends MapQueryValues {
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
|
|
||||||
int? get roleId {
|
int get roleId {
|
||||||
return (values['role_id'] as int?);
|
return (values['role_id'] as int);
|
||||||
}
|
}
|
||||||
|
|
||||||
set roleId(int? value) => values['role_id'] = value;
|
set roleId(int value) => values['role_id'] = value;
|
||||||
int? get userId {
|
int get userId {
|
||||||
return (values['user_id'] as int?);
|
return (values['user_id'] as int);
|
||||||
}
|
}
|
||||||
|
|
||||||
set userId(int? value) => values['user_id'] = value;
|
set userId(int value) => values['user_id'] = value;
|
||||||
void copyFrom(RoleUser model) {
|
void copyFrom(RoleUser model) {
|
||||||
if (model.role != null) {
|
if (model.role != null) {
|
||||||
values['role_id'] = model.role!.id;
|
values['role_id'] = model.role?.id;
|
||||||
}
|
}
|
||||||
if (model.user != null) {
|
if (model.user != null) {
|
||||||
values['user_id'] = model.user!.id;
|
values['user_id'] = model.user?.id;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -476,8 +479,8 @@ class RoleQuery extends Query<Role, RoleQueryWhere> {
|
||||||
|
|
||||||
@override
|
@override
|
||||||
bool canCompile(trampoline) {
|
bool canCompile(trampoline) {
|
||||||
return (!(trampoline.contains('roles') == true &&
|
return (!(trampoline.contains('roles') &&
|
||||||
trampoline.contains('role_users') == true));
|
trampoline.contains('role_users')));
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -492,7 +495,8 @@ class RoleQuery extends Query<Role, RoleQueryWhere> {
|
||||||
var l = out[idx];
|
var l = out[idx];
|
||||||
return out
|
return out
|
||||||
..[idx] = l.copyWith(
|
..[idx] = l.copyWith(
|
||||||
users: List<_User>.from(l.users)..addAll(model.users));
|
users: List<_User>.from(l.users ?? [])
|
||||||
|
..addAll(model.users ?? []));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -510,7 +514,8 @@ class RoleQuery extends Query<Role, RoleQueryWhere> {
|
||||||
var l = out[idx];
|
var l = out[idx];
|
||||||
return out
|
return out
|
||||||
..[idx] = l.copyWith(
|
..[idx] = l.copyWith(
|
||||||
users: List<_User>.from(l.users)..addAll(model.users));
|
users: List<_User>.from(l.users ?? [])
|
||||||
|
..addAll(model.users ?? []));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -528,7 +533,8 @@ class RoleQuery extends Query<Role, RoleQueryWhere> {
|
||||||
var l = out[idx];
|
var l = out[idx];
|
||||||
return out
|
return out
|
||||||
..[idx] = l.copyWith(
|
..[idx] = l.copyWith(
|
||||||
users: List<_User>.from(l.users)..addAll(model.users));
|
users: List<_User>.from(l.users ?? [])
|
||||||
|
..addAll(model.users ?? []));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -618,16 +624,16 @@ class User extends _User {
|
||||||
DateTime? updatedAt;
|
DateTime? updatedAt;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final String? username;
|
String? username;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final String? password;
|
String? password;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final String? email;
|
String? email;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final List<_Role> roles;
|
List<_Role> roles;
|
||||||
|
|
||||||
User copyWith(
|
User copyWith(
|
||||||
{String? id,
|
{String? id,
|
||||||
|
@ -636,7 +642,7 @@ class User extends _User {
|
||||||
String? username,
|
String? username,
|
||||||
String? password,
|
String? password,
|
||||||
String? email,
|
String? email,
|
||||||
List<_Role> roles = const []}) {
|
List<_Role>? roles}) {
|
||||||
return User(
|
return User(
|
||||||
id: id ?? this.id,
|
id: id ?? this.id,
|
||||||
createdAt: createdAt ?? this.createdAt,
|
createdAt: createdAt ?? this.createdAt,
|
||||||
|
@ -644,7 +650,7 @@ class User extends _User {
|
||||||
username: username ?? this.username,
|
username: username ?? this.username,
|
||||||
password: password ?? this.password,
|
password: password ?? this.password,
|
||||||
email: email ?? this.email,
|
email: email ?? this.email,
|
||||||
roles: roles);
|
roles: roles ?? this.roles);
|
||||||
}
|
}
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -671,20 +677,20 @@ class User extends _User {
|
||||||
return 'User(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, username=$username, password=$password, email=$email, roles=$roles)';
|
return 'User(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, username=$username, password=$password, email=$email, roles=$roles)';
|
||||||
}
|
}
|
||||||
|
|
||||||
Map<String, dynamic>? toJson() {
|
Map<String, dynamic> toJson() {
|
||||||
return UserSerializer.toMap(this);
|
return UserSerializer.toMap(this);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@generatedSerializable
|
@generatedSerializable
|
||||||
class RoleUser implements _RoleUser {
|
class RoleUser implements _RoleUser {
|
||||||
const RoleUser({this.role, this.user});
|
RoleUser({this.role, this.user});
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final _Role? role;
|
_Role? role;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final _User? user;
|
_User? user;
|
||||||
|
|
||||||
RoleUser copyWith({_Role? role, _User? user}) {
|
RoleUser copyWith({_Role? role, _User? user}) {
|
||||||
return RoleUser(role: role ?? this.role, user: user ?? this.user);
|
return RoleUser(role: role ?? this.role, user: user ?? this.user);
|
||||||
|
@ -713,8 +719,12 @@ class RoleUser implements _RoleUser {
|
||||||
@generatedSerializable
|
@generatedSerializable
|
||||||
class Role extends _Role {
|
class Role extends _Role {
|
||||||
Role(
|
Role(
|
||||||
{this.id, this.createdAt, this.updatedAt, this.name, List<_User?>? users})
|
{this.id,
|
||||||
: users = List.unmodifiable(users ?? []);
|
this.createdAt,
|
||||||
|
this.updatedAt,
|
||||||
|
this.name,
|
||||||
|
List<_User> users = const []})
|
||||||
|
: users = List.unmodifiable(users);
|
||||||
|
|
||||||
/// A unique identifier corresponding to this item.
|
/// A unique identifier corresponding to this item.
|
||||||
@override
|
@override
|
||||||
|
@ -732,14 +742,14 @@ class Role extends _Role {
|
||||||
String? name;
|
String? name;
|
||||||
|
|
||||||
@override
|
@override
|
||||||
final List<_User> users;
|
List<_User> users;
|
||||||
|
|
||||||
Role copyWith(
|
Role copyWith(
|
||||||
{String? id,
|
{String? id,
|
||||||
DateTime? createdAt,
|
DateTime? createdAt,
|
||||||
DateTime? updatedAt,
|
DateTime? updatedAt,
|
||||||
String? name,
|
String? name,
|
||||||
List<_User?>? users}) {
|
List<_User>? users}) {
|
||||||
return Role(
|
return Role(
|
||||||
id: id ?? this.id,
|
id: id ?? this.id,
|
||||||
createdAt: createdAt ?? this.createdAt,
|
createdAt: createdAt ?? this.createdAt,
|
||||||
|
@ -769,7 +779,7 @@ class Role extends _Role {
|
||||||
return 'Role(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, name=$name, users=$users)';
|
return 'Role(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, name=$name, users=$users)';
|
||||||
}
|
}
|
||||||
|
|
||||||
Map<String, dynamic>? toJson() {
|
Map<String, dynamic> toJson() {
|
||||||
return RoleSerializer.toMap(this);
|
return RoleSerializer.toMap(this);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -780,11 +790,11 @@ class Role extends _Role {
|
||||||
|
|
||||||
const UserSerializer userSerializer = UserSerializer();
|
const UserSerializer userSerializer = UserSerializer();
|
||||||
|
|
||||||
class UserEncoder extends Converter<User, Map?> {
|
class UserEncoder extends Converter<User, Map> {
|
||||||
const UserEncoder();
|
const UserEncoder();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map? convert(User model) => UserSerializer.toMap(model);
|
Map convert(User model) => UserSerializer.toMap(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
class UserDecoder extends Converter<Map, User> {
|
class UserDecoder extends Converter<Map, User> {
|
||||||
|
@ -794,7 +804,7 @@ class UserDecoder extends Converter<Map, User> {
|
||||||
User convert(Map map) => UserSerializer.fromMap(map);
|
User convert(Map map) => UserSerializer.fromMap(map);
|
||||||
}
|
}
|
||||||
|
|
||||||
class UserSerializer extends Codec<User, Map?> {
|
class UserSerializer extends Codec<User, Map> {
|
||||||
const UserSerializer();
|
const UserSerializer();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -806,12 +816,12 @@ class UserSerializer extends Codec<User, Map?> {
|
||||||
id: map['id'] as String?,
|
id: map['id'] as String?,
|
||||||
createdAt: map['created_at'] != null
|
createdAt: map['created_at'] != null
|
||||||
? (map['created_at'] is DateTime
|
? (map['created_at'] is DateTime
|
||||||
? (map['created_at'] as DateTime?)
|
? (map['created_at'] as DateTime)
|
||||||
: DateTime.parse(map['created_at'].toString()))
|
: DateTime.parse(map['created_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
updatedAt: map['updated_at'] != null
|
updatedAt: map['updated_at'] != null
|
||||||
? (map['updated_at'] is DateTime
|
? (map['updated_at'] is DateTime
|
||||||
? (map['updated_at'] as DateTime?)
|
? (map['updated_at'] as DateTime)
|
||||||
: DateTime.parse(map['updated_at'].toString()))
|
: DateTime.parse(map['updated_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
username: map['username'] as String?,
|
username: map['username'] as String?,
|
||||||
|
@ -823,9 +833,9 @@ class UserSerializer extends Codec<User, Map?> {
|
||||||
: []);
|
: []);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic>? toMap(_User? model) {
|
static Map<String, dynamic> toMap(_User? model) {
|
||||||
if (model == null) {
|
if (model == null) {
|
||||||
return null;
|
return {};
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
|
@ -898,7 +908,10 @@ class RoleUserSerializer extends Codec<RoleUser, Map> {
|
||||||
: null);
|
: null);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic> toMap(_RoleUser model) {
|
static Map<String, dynamic> toMap(_RoleUser? model) {
|
||||||
|
if (model == null) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
return {
|
return {
|
||||||
'role': RoleSerializer.toMap(model.role),
|
'role': RoleSerializer.toMap(model.role),
|
||||||
'user': UserSerializer.toMap(model.user)
|
'user': UserSerializer.toMap(model.user)
|
||||||
|
@ -916,11 +929,11 @@ abstract class RoleUserFields {
|
||||||
|
|
||||||
const RoleSerializer roleSerializer = RoleSerializer();
|
const RoleSerializer roleSerializer = RoleSerializer();
|
||||||
|
|
||||||
class RoleEncoder extends Converter<Role, Map?> {
|
class RoleEncoder extends Converter<Role, Map> {
|
||||||
const RoleEncoder();
|
const RoleEncoder();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
Map? convert(Role model) => RoleSerializer.toMap(model);
|
Map convert(Role model) => RoleSerializer.toMap(model);
|
||||||
}
|
}
|
||||||
|
|
||||||
class RoleDecoder extends Converter<Map, Role> {
|
class RoleDecoder extends Converter<Map, Role> {
|
||||||
|
@ -930,7 +943,7 @@ class RoleDecoder extends Converter<Map, Role> {
|
||||||
Role convert(Map map) => RoleSerializer.fromMap(map);
|
Role convert(Map map) => RoleSerializer.fromMap(map);
|
||||||
}
|
}
|
||||||
|
|
||||||
class RoleSerializer extends Codec<Role, Map?> {
|
class RoleSerializer extends Codec<Role, Map> {
|
||||||
const RoleSerializer();
|
const RoleSerializer();
|
||||||
|
|
||||||
@override
|
@override
|
||||||
|
@ -942,24 +955,24 @@ class RoleSerializer extends Codec<Role, Map?> {
|
||||||
id: map['id'] as String?,
|
id: map['id'] as String?,
|
||||||
createdAt: map['created_at'] != null
|
createdAt: map['created_at'] != null
|
||||||
? (map['created_at'] is DateTime
|
? (map['created_at'] is DateTime
|
||||||
? (map['created_at'] as DateTime?)
|
? (map['created_at'] as DateTime)
|
||||||
: DateTime.parse(map['created_at'].toString()))
|
: DateTime.parse(map['created_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
updatedAt: map['updated_at'] != null
|
updatedAt: map['updated_at'] != null
|
||||||
? (map['updated_at'] is DateTime
|
? (map['updated_at'] is DateTime
|
||||||
? (map['updated_at'] as DateTime?)
|
? (map['updated_at'] as DateTime)
|
||||||
: DateTime.parse(map['updated_at'].toString()))
|
: DateTime.parse(map['updated_at'].toString()))
|
||||||
: null,
|
: null,
|
||||||
name: map['name'] as String?,
|
name: map['name'] as String?,
|
||||||
users: map['users'] is Iterable
|
users: map['users'] is Iterable
|
||||||
? List.unmodifiable(((map['users'] as Iterable).whereType<Map>())
|
? List.unmodifiable(((map['users'] as Iterable).whereType<Map>())
|
||||||
.map(UserSerializer.fromMap))
|
.map(UserSerializer.fromMap))
|
||||||
: null);
|
: []);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic>? toMap(_Role? model) {
|
static Map<String, dynamic> toMap(_Role? model) {
|
||||||
if (model == null) {
|
if (model == null) {
|
||||||
return null;
|
return {};
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
'id': model.id,
|
'id': model.id,
|
||||||
|
|
|
@ -1,5 +1,4 @@
 import 'package:angel3_migration/angel3_migration.dart';
-//import 'package:angel3_model/angel3_model.dart';
 import 'package:angel3_serialize/angel3_serialize.dart';
 import 'package:angel3_orm/angel3_orm.dart';
 import 'package:optional/optional.dart';

@ -9,6 +8,7 @@ part 'world.g.dart';
 @serializable
 @Orm(tableName: 'world')
 abstract class _World {
+  @primaryKey
   int? id;

   @Column()
@ -10,8 +10,8 @@ class WorldMigration extends Migration {
  @override
  void up(Schema schema) {
    schema.create('world', (table) {
-     table.integer('id');
-     table.integer('randomNumber');
+     table.integer('id').primaryKey();
+     table.integer('random_number');
    });
  }
|
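The rename to `random_number` matters because the rest of the generated file addresses the column by that snake_case name; had the migration kept `randomNumber`, every statement built by the query layer below would reference a column that was never created. A small sketch of the dependency, using the `WorldFields` constants that appear later in this file (the SQL in the comment is illustrative, not verbatim generator output):

abstract class WorldFields {
  static const String id = 'id';
  static const String randomNumber = 'random_number';
}

void main() {
  // Roughly the shape of a statement the query builder produces, e.g.
  //   SELECT id, random_number FROM world WHERE random_number >= 10;
  var sql = 'SELECT ${WorldFields.id}, ${WorldFields.randomNumber} '
      'FROM world WHERE ${WorldFields.randomNumber} >= 10;';
  print(sql);
}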
@ -49,7 +49,7 @@ class WorldQuery extends Query<World, WorldQueryWhere> {

  @override
  List<String> get fields {
-   return const ['id', 'randomNumber'];
+   return const ['id', 'random_number'];
  }

  @override
@ -62,24 +62,24 @@ class WorldQuery extends Query<World, WorldQueryWhere> {
    return WorldQueryWhere(this);
  }

- static World? parseRow(List row) {
+ static Optional<World> parseRow(List row) {
    if (row.every((x) => x == null)) {
-     return null;
+     return Optional.empty();
    }
    var model = World(id: (row[0] as int?), randomNumber: (row[1] as int?));
-   return model;
+   return Optional.of(model);
  }

  @override
  Optional<World> deserialize(List row) {
-   return Optional.ofNullable(parseRow(row));
+   return parseRow(row);
  }
}

class WorldQueryWhere extends QueryWhere {
  WorldQueryWhere(WorldQuery query)
      : id = NumericSqlExpressionBuilder<int>(query, 'id'),
-       randomNumber = NumericSqlExpressionBuilder<int>(query, 'randomNumber');
+       randomNumber = NumericSqlExpressionBuilder<int>(query, 'random_number');

  final NumericSqlExpressionBuilder<int> id;
|
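`parseRow` now returns `Optional<World>` from package:optional directly, so `deserialize` can forward it unchanged, and an all-null row becomes `Optional.empty()` rather than a nullable model. A hedged sketch of how calling code consumes such a result (assuming the usual `map`/`orElse` API of that package):

import 'package:optional/optional.dart';

String describeRow(Optional<int> maybeId) {
  // map/orElse replace explicit null checks on the parsed model.
  return maybeId.map((id) => 'row #$id').orElse('empty row');
}

void main() {
  print(describeRow(Optional.of(42))); // row #42
  print(describeRow(Optional.empty())); // empty row
}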
@ -103,10 +103,10 @@ class WorldQueryValues extends MapQueryValues {
|
||||||
|
|
||||||
set id(int? value) => values['id'] = value;
|
set id(int? value) => values['id'] = value;
|
||||||
int? get randomNumber {
|
int? get randomNumber {
|
||||||
return (values['randomNumber'] as int?);
|
return (values['random_number'] as int?);
|
||||||
}
|
}
|
||||||
|
|
||||||
set randomNumber(int? value) => values['randomNumber'] = value;
|
set randomNumber(int? value) => values['random_number'] = value;
|
||||||
void copyFrom(World model) {
|
void copyFrom(World model) {
|
||||||
id = model.id;
|
id = model.id;
|
||||||
randomNumber = model.randomNumber;
|
randomNumber = model.randomNumber;
|
||||||
|
@ -183,14 +183,14 @@ class WorldSerializer extends Codec<World, Map> {
|
||||||
WorldDecoder get decoder => const WorldDecoder();
|
WorldDecoder get decoder => const WorldDecoder();
|
||||||
static World fromMap(Map map) {
|
static World fromMap(Map map) {
|
||||||
return World(
|
return World(
|
||||||
id: map['id'] as int?, randomNumber: map['randomNumber'] as int?);
|
id: map['id'] as int?, randomNumber: map['random_number'] as int?);
|
||||||
}
|
}
|
||||||
|
|
||||||
static Map<String, dynamic> toMap(_World? model) {
|
static Map<String, dynamic> toMap(_World? model) {
|
||||||
if (model == null) {
|
if (model == null) {
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
return {'id': model.id, 'randomNumber': model.randomNumber};
|
return {'id': model.id, 'random_number': model.randomNumber};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -199,5 +199,5 @@ abstract class WorldFields {
|
||||||
|
|
||||||
static const String id = 'id';
|
static const String id = 'id';
|
||||||
|
|
||||||
static const String randomNumber = 'randomNumber';
|
static const String randomNumber = 'random_number';
|
||||||
}
|
}
|
||||||
|
|
|
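Taken together, these hunks keep the Dart-side API camelCase while the SQL side now uses the real column name. A minimal usage sketch, assuming a connected QueryExecutor named executor (getOne returns an Optional after this change):

Future<void> printRandomNumber(QueryExecutor executor, int id) async {
  var query = WorldQuery()..where!.id.equals(id); // compiles to a WHERE clause on `id`
  var worldOpt = await query.getOne(executor);    // Optional<World>
  worldOpt.ifPresent((world) {
    // The Dart field stays `randomNumber`; only the column is `random_number`.
    print(world.randomNumber);
  });
}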
@@ -19,12 +19,12 @@ dev_dependencies:
   angel3_framework: ^4.2.0
   build_runner: ^2.0.1
   lints: ^1.0.0
-#dependency_overrides:
+dependency_overrides:
 # angel3_migration_runner:
 #   path: ../angel_migration_runner
-# angel3_orm:
-#   path: ../angel_orm
+  angel3_orm:
+    path: ../angel_orm
 # angel3_migration:
 #   path: ..//angel_migration
-# angel3_orm_generator:
-#   path: ../angel_orm_generator
+  angel3_orm_generator:
+    path: ../angel_orm_generator
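Uncommenting these overrides points the example at the in-repo angel3_orm and angel3_orm_generator packages instead of the published versions, so the sample exercises the generator fix in this commit without waiting for a pub.dev release.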
packages/orm/orm_builder/analysis_options.yaml (new file)
@@ -0,0 +1 @@
include: package:lints/recommended.yaml
packages/orm/orm_builder/lib/src/belongs_to_test.dart (new file)
@@ -0,0 +1,175 @@
import 'dart:async';
import 'package:angel3_orm/angel3_orm.dart';
import 'package:test/test.dart';
import 'models/book.dart';

import 'util.dart';

void belongsToTests(FutureOr<QueryExecutor> Function() createExecutor,
    {FutureOr<void> Function(QueryExecutor)? close}) {
  late QueryExecutor executor;
  Author? jkRowling;
  Author? jameson;
  Book? deathlyHallows;
  close ??= (_) => null;

  setUp(() async {
    executor = await createExecutor();

    // Insert an author
    var query = AuthorQuery()..values.name = 'J.K. Rowling';
    jkRowling = (await query.insert(executor)).value;

    query.values.name = 'J.K. Jameson';
    jameson = (await query.insert(executor)).value;

    // And a book
    var bookQuery = BookQuery();
    bookQuery.values
      ..authorId = jkRowling!.idAsInt
      ..partnerAuthorId = jameson!.idAsInt
      ..name = 'Deathly Hallows';

    deathlyHallows = (await bookQuery.insert(executor)).value;
  });

  tearDown(() => close!(executor));

  group('selects', () {
    test('select all', () async {
      var query = BookQuery();
      var books = await query.get(executor);
      expect(books, hasLength(1));

      var book = books.first;
      //print(book.toJson());
      expect(book.id, deathlyHallows!.id);
      expect(book.name, deathlyHallows!.name);

      var author = book.author!;
      //print(AuthorSerializer.toMap(author));
      expect(author.id, jkRowling!.id);
      expect(author.name, jkRowling!.name);
    });

    test('select one', () async {
      var query = BookQuery();
      query.where!.id.equals(int.parse(deathlyHallows!.id!));
      //print(query.compile({}));

      var bookOpt = await query.getOne(executor);
      expect(bookOpt.isPresent, true);
      bookOpt.ifPresent((book) {
        //print(book.toJson());
        expect(book.id, deathlyHallows!.id);
        expect(book.name, deathlyHallows!.name);

        var author = book.author!;
        //print(AuthorSerializer.toMap(author));
        expect(author.id, jkRowling!.id);
        expect(author.name, jkRowling!.name);
      });
    });

    test('where clause', () async {
      var query = BookQuery()
        ..where!.name.equals('Goblet of Fire')
        ..orWhere((w) => w.authorId.equals(jkRowling!.idAsInt));
      //print(query.compile({}));

      var books = await query.get(executor);
      expect(books, hasLength(1));

      var book = books.first;
      //print(book.toJson());
      expect(book.id, deathlyHallows!.id);
      expect(book.name, deathlyHallows!.name);

      var author = book.author!;
      //print(AuthorSerializer.toMap(author));
      expect(author.id, jkRowling!.id);
      expect(author.name, jkRowling!.name);
    });

    test('union', () async {
      var query1 = BookQuery()..where!.name.like('Deathly%');
      var query2 = BookQuery()..where!.authorId.equals(-1);
      var query3 = BookQuery()
        ..where!.name.isIn(['Goblet of Fire', 'Order of the Phoenix']);
      query1
        ..union(query2)
        ..unionAll(query3);
      //print(query1.compile({}));

      var books = await query1.get(executor);
      expect(books, hasLength(1));

      var book = books.first;
      //print(book.toJson());
      expect(book.id, deathlyHallows!.id);
      expect(book.name, deathlyHallows!.name);

      var author = book.author!;
      //print(AuthorSerializer.toMap(author));
      expect(author.id, jkRowling!.id);
      expect(author.name, jkRowling!.name);
    });

    test('order by', () async {
      var query = AuthorQuery()..orderBy(AuthorFields.name, descending: true);
      var authors = await query.get(executor);
      expect(authors, [jkRowling, jameson]);
    });
  });

  test('insert sets relationship', () {
    expect(deathlyHallows!.author, jkRowling);
    //expect(deathlyHallows.author, isNotNull);
    //expect(deathlyHallows.author.name, rowling.name);
  });

  test('delete stream', () async {
    //printSeparator('Delete stream test');
    var query = BookQuery()..where!.name.equals(deathlyHallows!.name!);
    //print(query.compile({}, preamble: 'DELETE', withFields: false));
    var books = await query.delete(executor);
    expect(books, hasLength(1));

    var book = books.first;
    expect(book.id, deathlyHallows?.id);
    expect(book.author, isNotNull);
    expect(book.author!.name, jkRowling!.name);
  });

  test('update book', () async {
    var cloned = deathlyHallows!.copyWith(name: "Sorcerer's Stone");
    var query = BookQuery()
      ..where?.id.equals(int.parse(cloned.id!))
      ..values.copyFrom(cloned);
    var bookOpt = await (query.updateOne(executor));
    expect(bookOpt.isPresent, true);
    bookOpt.ifPresent((book) {
      //print(book.toJson());
      expect(book.name, cloned.name);
      expect(book.author, isNotNull);
      expect(book.author!.name, jkRowling!.name);
    });
  });

  group('joined subquery', () {
    // To verify that the joined subquery is correct,
    // we test both a query that return empty, and one
    // that should return correctly.
    test('returns empty on false subquery', () async {
      printSeparator('False subquery test');
      var query = BookQuery()..author.where!.name.equals('Billie Jean');
      expect(await query.get(executor), isEmpty);
    });

    test('returns values on true subquery', () async {
      printSeparator('True subquery test');
      var query = BookQuery()..author.where!.name.like('%Rowling%');
      expect(await query.get(executor), [deathlyHallows]);
    });
  });
}
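The suite above is database-agnostic: it only needs a factory that produces a connected QueryExecutor, plus an optional teardown. A hypothetical harness sketch (connectToTestDatabase and closeTestDatabase are placeholders for whatever executor the caller supplies, e.g. one backed by PostgreSQL):

import 'package:angel3_orm/angel3_orm.dart';
import 'package:orm_builder/src/belongs_to_test.dart';

// Placeholder factory: wire this up to a real database driver.
Future<QueryExecutor> connectToTestDatabase() async => throw UnimplementedError();
Future<void> closeTestDatabase(QueryExecutor executor) async {}

void main() {
  belongsToTests(connectToTestDatabase, close: closeTestDatabase);
}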
packages/orm/orm_builder/lib/src/models/book.dart (new file)
@@ -0,0 +1,28 @@
library angel_orm3.generator.models.book;

import 'package:angel3_migration/angel3_migration.dart';
import 'package:angel3_orm/angel3_orm.dart';
import 'package:angel3_serialize/angel3_serialize.dart';
import 'package:optional/optional.dart';

part 'book.g.dart';

@serializable
@orm
class _Book extends Model {
  @BelongsTo(joinType: JoinType.inner)
  _Author? author;

  @BelongsTo(localKey: 'partner_author_id', joinType: JoinType.inner)
  _Author? partnerAuthor;

  String? name;
}

@serializable
@orm
abstract class _Author extends Model {
  @Column(length: 255, indexType: IndexType.unique)
  @SerializableField(defaultValue: 'Tobe Osakwe')
  String? get name;
}
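For orientation before the generated output below: because _Book declares its authors with @BelongsTo(joinType: JoinType.inner), the generated BookQuery joins the authors table and hydrates book.author automatically. A small sketch mirroring the test above (assumes a connected QueryExecutor named executor):

Future<void> showAuthors(QueryExecutor executor) async {
  var query = BookQuery()..where!.name.like('Deathly%');
  for (var book in await query.get(executor)) {
    // `author` and `partnerAuthor` are populated from the inner joins.
    print('${book.name} by ${book.author?.name} and ${book.partnerAuthor?.name}');
  }
}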
packages/orm/orm_builder/lib/src/models/book.g.dart (new file)
@@ -0,0 +1,617 @@
// GENERATED CODE - DO NOT MODIFY BY HAND

part of angel_orm3.generator.models.book;

// **************************************************************************
// MigrationGenerator
// **************************************************************************

class BookMigration extends Migration {
  @override
  void up(Schema schema) {
    schema.create('books', (table) {
      table.serial('id').primaryKey();
      table.timeStamp('created_at');
      table.timeStamp('updated_at');
      table.varChar('name', length: 255);
      table.declare('author_id', ColumnType('int')).references('authors', 'id');
      table
          .declare('partner_author_id', ColumnType('int'))
          .references('authors', 'id');
    });
  }

  @override
  void down(Schema schema) {
    schema.drop('books');
  }
}

class AuthorMigration extends Migration {
  @override
  void up(Schema schema) {
    schema.create('authors', (table) {
      table.serial('id').primaryKey();
      table.timeStamp('created_at');
      table.timeStamp('updated_at');
      table.varChar('name', length: 255)
        ..defaultsTo('Tobe Osakwe')
        ..unique();
    });
  }

  @override
  void down(Schema schema) {
    schema.drop('authors');
  }
}

// **************************************************************************
// OrmGenerator
// **************************************************************************

class BookQuery extends Query<Book, BookQueryWhere> {
  BookQuery({Query? parent, Set<String>? trampoline}) : super(parent: parent) {
    trampoline ??= <String>{};
    trampoline.add(tableName);
    _where = BookQueryWhere(this);
    join(_author = AuthorQuery(trampoline: trampoline, parent: this),
        'author_id', 'id',
        additionalFields: const ['id', 'created_at', 'updated_at', 'name'],
        trampoline: trampoline);
    join(_partnerAuthor = AuthorQuery(trampoline: trampoline, parent: this),
        'partner_author_id', 'id',
        additionalFields: const ['id', 'created_at', 'updated_at', 'name'],
        trampoline: trampoline);
  }

  @override
  final BookQueryValues values = BookQueryValues();

  BookQueryWhere? _where;

  late AuthorQuery _author;

  late AuthorQuery _partnerAuthor;

  @override
  Map<String, String> get casts {
    return {};
  }

  @override
  String get tableName {
    return 'books';
  }

  @override
  List<String> get fields {
    return const [
      'id',
      'created_at',
      'updated_at',
      'author_id',
      'partner_author_id',
      'name'
    ];
  }

  @override
  BookQueryWhere? get where {
    return _where;
  }

  @override
  BookQueryWhere newWhereClause() {
    return BookQueryWhere(this);
  }

  static Optional<Book> parseRow(List row) {
    if (row.every((x) => x == null)) {
      return Optional.empty();
    }
    var model = Book(
        id: row[0].toString(),
        createdAt: (row[1] as DateTime?),
        updatedAt: (row[2] as DateTime?),
        name: (row[5] as String?));
    if (row.length > 6) {
      var modelOpt = AuthorQuery.parseRow(row.skip(6).take(4).toList());
      modelOpt.ifPresent((m) {
        model = model.copyWith(author: m);
      });
    }
    if (row.length > 10) {
      var modelOpt = AuthorQuery.parseRow(row.skip(10).take(4).toList());
      modelOpt.ifPresent((m) {
        model = model.copyWith(partnerAuthor: m);
      });
    }
    return Optional.of(model);
  }

  @override
  Optional<Book> deserialize(List row) {
    return parseRow(row);
  }

  AuthorQuery get author {
    return _author;
  }

  AuthorQuery get partnerAuthor {
    return _partnerAuthor;
  }
}

class BookQueryWhere extends QueryWhere {
  BookQueryWhere(BookQuery query)
      : id = NumericSqlExpressionBuilder<int>(query, 'id'),
        createdAt = DateTimeSqlExpressionBuilder(query, 'created_at'),
        updatedAt = DateTimeSqlExpressionBuilder(query, 'updated_at'),
        authorId = NumericSqlExpressionBuilder<int>(query, 'author_id'),
        partnerAuthorId =
            NumericSqlExpressionBuilder<int>(query, 'partner_author_id'),
        name = StringSqlExpressionBuilder(query, 'name');

  final NumericSqlExpressionBuilder<int> id;

  final DateTimeSqlExpressionBuilder createdAt;

  final DateTimeSqlExpressionBuilder updatedAt;

  final NumericSqlExpressionBuilder<int> authorId;

  final NumericSqlExpressionBuilder<int> partnerAuthorId;

  final StringSqlExpressionBuilder name;

  @override
  List<SqlExpressionBuilder> get expressionBuilders {
    return [id, createdAt, updatedAt, authorId, partnerAuthorId, name];
  }
}

class BookQueryValues extends MapQueryValues {
  @override
  Map<String, String> get casts {
    return {};
  }

  String? get id {
    return (values['id'] as String?);
  }

  set id(String? value) => values['id'] = value;
  DateTime? get createdAt {
    return (values['created_at'] as DateTime?);
  }

  set createdAt(DateTime? value) => values['created_at'] = value;
  DateTime? get updatedAt {
    return (values['updated_at'] as DateTime?);
  }

  set updatedAt(DateTime? value) => values['updated_at'] = value;
  int get authorId {
    return (values['author_id'] as int);
  }

  set authorId(int value) => values['author_id'] = value;
  int get partnerAuthorId {
    return (values['partner_author_id'] as int);
  }

  set partnerAuthorId(int value) => values['partner_author_id'] = value;
  String? get name {
    return (values['name'] as String?);
  }

  set name(String? value) => values['name'] = value;
  void copyFrom(Book model) {
    createdAt = model.createdAt;
    updatedAt = model.updatedAt;
    name = model.name;
    if (model.author != null) {
      values['author_id'] = model.author?.id;
    }
    if (model.partnerAuthor != null) {
      values['partner_author_id'] = model.partnerAuthor?.id;
    }
  }
}

class AuthorQuery extends Query<Author, AuthorQueryWhere> {
  AuthorQuery({Query? parent, Set<String>? trampoline})
      : super(parent: parent) {
    trampoline ??= <String>{};
    trampoline.add(tableName);
    _where = AuthorQueryWhere(this);
  }

  @override
  final AuthorQueryValues values = AuthorQueryValues();

  AuthorQueryWhere? _where;

  @override
  Map<String, String> get casts {
    return {};
  }

  @override
  String get tableName {
    return 'authors';
  }

  @override
  List<String> get fields {
    return const ['id', 'created_at', 'updated_at', 'name'];
  }

  @override
  AuthorQueryWhere? get where {
    return _where;
  }

  @override
  AuthorQueryWhere newWhereClause() {
    return AuthorQueryWhere(this);
  }

  static Optional<Author> parseRow(List row) {
    if (row.every((x) => x == null)) {
      return Optional.empty();
    }
    var model = Author(
        id: row[0].toString(),
        createdAt: (row[1] as DateTime?),
        updatedAt: (row[2] as DateTime?),
        name: (row[3] as String?));
    return Optional.of(model);
  }

  @override
  Optional<Author> deserialize(List row) {
    return parseRow(row);
  }
}

class AuthorQueryWhere extends QueryWhere {
  AuthorQueryWhere(AuthorQuery query)
      : id = NumericSqlExpressionBuilder<int>(query, 'id'),
        createdAt = DateTimeSqlExpressionBuilder(query, 'created_at'),
        updatedAt = DateTimeSqlExpressionBuilder(query, 'updated_at'),
        name = StringSqlExpressionBuilder(query, 'name');

  final NumericSqlExpressionBuilder<int> id;

  final DateTimeSqlExpressionBuilder createdAt;

  final DateTimeSqlExpressionBuilder updatedAt;

  final StringSqlExpressionBuilder name;

  @override
  List<SqlExpressionBuilder> get expressionBuilders {
    return [id, createdAt, updatedAt, name];
  }
}

class AuthorQueryValues extends MapQueryValues {
  @override
  Map<String, String> get casts {
    return {};
  }

  String? get id {
    return (values['id'] as String?);
  }

  set id(String? value) => values['id'] = value;
  DateTime? get createdAt {
    return (values['created_at'] as DateTime?);
  }

  set createdAt(DateTime? value) => values['created_at'] = value;
  DateTime? get updatedAt {
    return (values['updated_at'] as DateTime?);
  }

  set updatedAt(DateTime? value) => values['updated_at'] = value;
  String? get name {
    return (values['name'] as String?);
  }

  set name(String? value) => values['name'] = value;
  void copyFrom(Author model) {
    createdAt = model.createdAt;
    updatedAt = model.updatedAt;
    name = model.name;
  }
}

// **************************************************************************
// JsonModelGenerator
// **************************************************************************

@generatedSerializable
class Book extends _Book {
  Book(
      {this.id,
      this.createdAt,
      this.updatedAt,
      this.author,
      this.partnerAuthor,
      this.name});

  /// A unique identifier corresponding to this item.
  @override
  String? id;

  /// The time at which this item was created.
  @override
  DateTime? createdAt;

  /// The last time at which this item was updated.
  @override
  DateTime? updatedAt;

  @override
  _Author? author;

  @override
  _Author? partnerAuthor;

  @override
  String? name;

  Book copyWith(
      {String? id,
      DateTime? createdAt,
      DateTime? updatedAt,
      _Author? author,
      _Author? partnerAuthor,
      String? name}) {
    return Book(
        id: id ?? this.id,
        createdAt: createdAt ?? this.createdAt,
        updatedAt: updatedAt ?? this.updatedAt,
        author: author ?? this.author,
        partnerAuthor: partnerAuthor ?? this.partnerAuthor,
        name: name ?? this.name);
  }

  @override
  bool operator ==(other) {
    return other is _Book &&
        other.id == id &&
        other.createdAt == createdAt &&
        other.updatedAt == updatedAt &&
        other.author == author &&
        other.partnerAuthor == partnerAuthor &&
        other.name == name;
  }

  @override
  int get hashCode {
    return hashObjects([id, createdAt, updatedAt, author, partnerAuthor, name]);
  }

  @override
  String toString() {
    return 'Book(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, author=$author, partnerAuthor=$partnerAuthor, name=$name)';
  }

  Map<String, dynamic> toJson() {
    return BookSerializer.toMap(this);
  }
}

@generatedSerializable
class Author extends _Author {
  Author({this.id, this.createdAt, this.updatedAt, this.name = 'Tobe Osakwe'});

  /// A unique identifier corresponding to this item.
  @override
  String? id;

  /// The time at which this item was created.
  @override
  DateTime? createdAt;

  /// The last time at which this item was updated.
  @override
  DateTime? updatedAt;

  @override
  String? name;

  Author copyWith(
      {String? id, DateTime? createdAt, DateTime? updatedAt, String? name}) {
    return Author(
        id: id ?? this.id,
        createdAt: createdAt ?? this.createdAt,
        updatedAt: updatedAt ?? this.updatedAt,
        name: name ?? this.name);
  }

  @override
  bool operator ==(other) {
    return other is _Author &&
        other.id == id &&
        other.createdAt == createdAt &&
        other.updatedAt == updatedAt &&
        other.name == name;
  }

  @override
  int get hashCode {
    return hashObjects([id, createdAt, updatedAt, name]);
  }

  @override
  String toString() {
    return 'Author(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, name=$name)';
  }

  Map<String, dynamic> toJson() {
    return AuthorSerializer.toMap(this);
  }
}

// **************************************************************************
// SerializerGenerator
// **************************************************************************

const BookSerializer bookSerializer = BookSerializer();

class BookEncoder extends Converter<Book, Map> {
  const BookEncoder();

  @override
  Map convert(Book model) => BookSerializer.toMap(model);
}

class BookDecoder extends Converter<Map, Book> {
  const BookDecoder();

  @override
  Book convert(Map map) => BookSerializer.fromMap(map);
}

class BookSerializer extends Codec<Book, Map> {
  const BookSerializer();

  @override
  BookEncoder get encoder => const BookEncoder();
  @override
  BookDecoder get decoder => const BookDecoder();
  static Book fromMap(Map map) {
    return Book(
        id: map['id'] as String?,
        createdAt: map['created_at'] != null
            ? (map['created_at'] is DateTime
                ? (map['created_at'] as DateTime)
                : DateTime.parse(map['created_at'].toString()))
            : null,
        updatedAt: map['updated_at'] != null
            ? (map['updated_at'] is DateTime
                ? (map['updated_at'] as DateTime)
                : DateTime.parse(map['updated_at'].toString()))
            : null,
        author: map['author'] != null
            ? AuthorSerializer.fromMap(map['author'] as Map)
            : null,
        partnerAuthor: map['partner_author'] != null
            ? AuthorSerializer.fromMap(map['partner_author'] as Map)
            : null,
        name: map['name'] as String?);
  }

  static Map<String, dynamic> toMap(_Book? model) {
    if (model == null) {
      return {};
    }
    return {
      'id': model.id,
      'created_at': model.createdAt?.toIso8601String(),
      'updated_at': model.updatedAt?.toIso8601String(),
      'author': AuthorSerializer.toMap(model.author),
      'partner_author': AuthorSerializer.toMap(model.partnerAuthor),
      'name': model.name
    };
  }
}

abstract class BookFields {
  static const List<String> allFields = <String>[
    id,
    createdAt,
    updatedAt,
    author,
    partnerAuthor,
    name
  ];

  static const String id = 'id';

  static const String createdAt = 'created_at';

  static const String updatedAt = 'updated_at';

  static const String author = 'author';

  static const String partnerAuthor = 'partner_author';

  static const String name = 'name';
}

const AuthorSerializer authorSerializer = AuthorSerializer();

class AuthorEncoder extends Converter<Author, Map> {
  const AuthorEncoder();

  @override
  Map convert(Author model) => AuthorSerializer.toMap(model);
}

class AuthorDecoder extends Converter<Map, Author> {
  const AuthorDecoder();

  @override
  Author convert(Map map) => AuthorSerializer.fromMap(map);
}

class AuthorSerializer extends Codec<Author, Map> {
  const AuthorSerializer();

  @override
  AuthorEncoder get encoder => const AuthorEncoder();
  @override
  AuthorDecoder get decoder => const AuthorDecoder();
  static Author fromMap(Map map) {
    return Author(
        id: map['id'] as String?,
        createdAt: map['created_at'] != null
            ? (map['created_at'] is DateTime
                ? (map['created_at'] as DateTime)
                : DateTime.parse(map['created_at'].toString()))
            : null,
        updatedAt: map['updated_at'] != null
            ? (map['updated_at'] is DateTime
                ? (map['updated_at'] as DateTime)
                : DateTime.parse(map['updated_at'].toString()))
            : null,
        name: map['name'] as String? ?? 'Tobe Osakwe');
  }

  static Map<String, dynamic> toMap(_Author? model) {
    if (model == null) {
      return {};
    }
    return {
      'id': model.id,
      'created_at': model.createdAt?.toIso8601String(),
      'updated_at': model.updatedAt?.toIso8601String(),
      'name': model.name
    };
  }
}

abstract class AuthorFields {
  static const List<String> allFields = <String>[
    id,
    createdAt,
    updatedAt,
    name
  ];

  static const String id = 'id';

  static const String createdAt = 'created_at';

  static const String updatedAt = 'updated_at';

  static const String name = 'name';
}
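A quick round-trip sketch using the serializer generated above (pure Dart, no database involved):

void bookSerializerRoundTrip() {
  var book = Book(name: 'Deathly Hallows', author: Author(name: 'J.K. Rowling'));
  var map = BookSerializer.toMap(book);      // snake_case keys, nested author map
  var decoded = BookSerializer.fromMap(map); // rebuilds the Book, including the nested Author
  assert(decoded.name == book.name && decoded.author?.name == book.author?.name);
}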
packages/orm/orm_builder/lib/src/models/user.dart (new file)
@@ -0,0 +1,38 @@
library angel3_orm_generator.test.models.user;

import 'package:angel3_migration/angel3_migration.dart';
import 'package:angel3_orm/angel3_orm.dart';
import 'package:angel3_serialize/angel3_serialize.dart';
import 'package:optional/optional.dart';

part 'user.g.dart';

@serializable
@orm
abstract class _User extends Model {
  String? get username;
  String? get password;
  String? get email;

  @ManyToMany(_RoleUser)
  List<_Role> get roles;
}

@serializable
@orm
abstract class _RoleUser {
  @belongsTo
  _Role? get role;

  @belongsTo
  _User? get user;
}

@serializable
@orm
abstract class _Role extends Model {
  String? name;

  @ManyToMany(_RoleUser)
  List<_User>? get users;
}
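The _RoleUser class is the pivot of the many-to-many relation between users and roles; its generated query API lives in the suppressed user.g.dart below. Purely as a hypothetical sketch of the intended usage (RoleUserQuery and its userId/roleId values follow the same naming pattern as BookQuery above, but are not shown in this diff):

Future<void> grantRole(QueryExecutor executor, User user, Role role) async {
  var pivot = RoleUserQuery()
    ..values.userId = user.idAsInt
    ..values.roleId = role.idAsInt;
  await pivot.insert(executor); // a subsequent UserQuery should then include `role` in user.roles
}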
packages/orm/orm_builder/lib/src/models/user.g.dart (new file, 1002 lines)
File diff suppressed because it is too large.
packages/orm/orm_builder/lib/src/util.dart (new file)
@@ -0,0 +1,12 @@
import 'dart:io';
import 'package:io/ansi.dart';

void printSeparator(String title) {
  var b = StringBuffer('===' + title.toUpperCase());
  for (var i = b.length; i < stdout.terminalColumns - 3; i++) {
    b.write('=');
  }
  for (var i = 0; i < 3; i++) {
    print(magenta.wrap(b.toString()));
  }
}
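printSeparator is only a visual aid for test logs: a call such as printSeparator('False subquery test'); (as used in belongs_to_test.dart above) prints a terminal-width ===FALSE SUBQUERY TEST=== banner three times in magenta.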
packages/orm/orm_builder/pubspec.yaml (new file)
@@ -0,0 +1,31 @@
name: orm_builder
version: 1.0.0
description: ORM Builder Testing
published_to: none
environment:
  sdk: '>=2.12.0 <3.0.0'
dependencies:
  angel3_migration: ^4.0.0
  angel3_model: ^3.1.0
  angel3_orm: ^4.0.0
  angel3_serialize: ^4.1.0
  io: ^1.0.0
  test: ^1.17.4
  collection: ^1.15.0
  optional: ^6.0.0
dev_dependencies:
  angel3_orm_generator: ^4.1.0
  angel3_framework: ^4.2.0
  build_runner: ^2.0.1
  lints: ^1.0.0
dependency_overrides:
  # angel3_migration_runner:
  #   path: ../angel_migration_runner
  angel3_orm:
    path: ../angel_orm
  angel3_migration:
    path: ../angel_migration
  angel3_orm_generator:
    path: ../angel_orm_generator
  angel3_serialize:
    path: ../../serialize/angel_serialize
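With this pubspec in place, the *.g.dart files in this package are produced by build_runner via angel3_orm_generator; regenerating them is typically a matter of running dart run build_runner build inside packages/orm/orm_builder.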