Merge pull request #74 from dukefirehawk/bug-fix/orm-generator
Bug fix/orm generator
This commit is contained in:
commit
f93f921162
110 changed files with 2777 additions and 331 deletions
3
.vscode/settings.json
vendored
3
.vscode/settings.json
vendored
|
@ -13,5 +13,6 @@
|
|||
},
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.fixAll.markdownlint": true
|
||||
}
|
||||
},
|
||||
"cmake.configureOnOpen": false
|
||||
}
|
|
@ -63,8 +63,9 @@ For more details, checkout [Project Status](https://github.com/dukefirehawk/ange
|
|||
1. Download and install [Dart](https://dart.dev/get-dart)
|
||||
|
||||
2. Clone one of the following starter projects:
|
||||
* [Angel3 Basic Template](https://github.com/dukefirehawk/boilerplates/tree/angel3-basic)
|
||||
* [Angel3 ORM Template](https://github.com/dukefirehawk/boilerplates/tree/angel3-orm)
|
||||
* [Basic Template](https://github.com/dukefirehawk/boilerplates/tree/angel3-basic)
|
||||
* [ORM for PostgreSQL Template](https://github.com/dukefirehawk/boilerplates/tree/angel3-orm)
|
||||
* [ORM for MySQL Template](https://github.com/dukefirehawk/boilerplates/tree/angel3-orm-mysql)
|
||||
* [Angel3 Graphql Template](https://github.com/dukefirehawk/boilerplates/tree/angel3-graphql)
|
||||
|
||||
3. Run the project in development mode (*hot-reloaded* is enabled on file changes).
|
||||
|
@ -123,12 +124,12 @@ Check out [Migrating to Angel3](https://angel3-docs.dukefirehawk.com/migration/a
|
|||
|
||||
The performance benchmark can be found at
|
||||
|
||||
[TechEmpower Framework Benchmarks](https://tfb-status.techempower.com/)
|
||||
[TechEmpower Framework Benchmarks Round 21](https://www.techempower.com/benchmarks/#section=data-r21&test=composite)
|
||||
|
||||
The test cases are build using standard `Angel3 ORM` template. This result will be used for fine-tuning Angel3 framework. The following test cases will be progressively added in the upcoming update to benchmark.
|
||||
|
||||
1. Angel3 with MongoDB
|
||||
2. Angel3 with MySQL ORM
|
||||
2. Angel3 with ORM MySQL
|
||||
3. Cached queries
|
||||
|
||||
## Examples and Documentation
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
name: angel_orm_sqlite
|
||||
environment:
|
||||
sdk: ">=2.16.0 <3.0.0"
|
||||
dependencies:
|
||||
angel_orm: ^2.0.0-dev
|
||||
|
|
30
docker/README.md
Normal file
30
docker/README.md
Normal file
|
@ -0,0 +1,30 @@
|
|||
# Working with Docker
|
||||
|
||||
## Postgresql
|
||||
|
||||
### Starting the container
|
||||
|
||||
```bash
|
||||
docker-compose -f docker-compose-pg.yml up
|
||||
```
|
||||
|
||||
### Running psql
|
||||
|
||||
```bash
|
||||
docker exec -it <container id> /bin/bash
|
||||
psql --username postgres
|
||||
```
|
||||
|
||||
### Create database, user and access
|
||||
|
||||
```psql
|
||||
postgres=# create database orm_test;
|
||||
postgres=# create user test with encrypted password 'test123';
|
||||
postgres=# grant all privileges on database orm_test to test;
|
||||
```
|
||||
|
||||
## MariaDB
|
||||
|
||||
## MySQL
|
||||
|
||||
## Redis
|
20
docker/docker-compose-mariadb.yml
Normal file
20
docker/docker-compose-mariadb.yml
Normal file
|
@ -0,0 +1,20 @@
|
|||
version: "3.8"
|
||||
services:
|
||||
mariadb:
|
||||
image: mariadb:latest
|
||||
restart: "no"
|
||||
ports:
|
||||
- "3306:3306"
|
||||
environment:
|
||||
- MARIADB_ROOT_PASSWORD=Qwerty
|
||||
volumes:
|
||||
- "mariadb:/var/lib/mysql"
|
||||
networks:
|
||||
- webnet
|
||||
|
||||
volumes:
|
||||
mariadb:
|
||||
driver: local
|
||||
|
||||
networks:
|
||||
webnet:
|
20
docker/docker-compose-mysql.yml
Normal file
20
docker/docker-compose-mysql.yml
Normal file
|
@ -0,0 +1,20 @@
|
|||
version: "3.8"
|
||||
services:
|
||||
mysql:
|
||||
image: mysql:latest
|
||||
restart: "no"
|
||||
ports:
|
||||
- "3306:3306"
|
||||
environment:
|
||||
- MYSQL_ROOT_PASSWORD=Qwerty
|
||||
volumes:
|
||||
- "mysql:/var/lib/mysql"
|
||||
networks:
|
||||
- webnet
|
||||
|
||||
volumes:
|
||||
mysql:
|
||||
driver: local
|
||||
|
||||
networks:
|
||||
webnet:
|
20
docker/docker-compose-pg.yml
Normal file
20
docker/docker-compose-pg.yml
Normal file
|
@ -0,0 +1,20 @@
|
|||
version: "3.8"
|
||||
services:
|
||||
pgdb:
|
||||
image: postgres:latest
|
||||
restart: "no"
|
||||
ports:
|
||||
- "5432:5432"
|
||||
environment:
|
||||
- POSTGRES_USER=postgres
|
||||
- POSTGRES_PASSWORD=postgres
|
||||
volumes:
|
||||
- "db:/var/lib/postgresql/data"
|
||||
networks:
|
||||
- webnet
|
||||
volumes:
|
||||
db:
|
||||
driver: local
|
||||
|
||||
networks:
|
||||
webnet:
|
21
docker/docker-compose-redis.yml
Normal file
21
docker/docker-compose-redis.yml
Normal file
|
@ -0,0 +1,21 @@
|
|||
version: "3.8"
|
||||
services:
|
||||
redis:
|
||||
image: redis:latest
|
||||
restart: "no"
|
||||
ports:
|
||||
- "5432:5432"
|
||||
environment:
|
||||
- POSTGRES_USER=postgres
|
||||
- POSTGRES_PASSWORD=postgres
|
||||
volumes:
|
||||
- "redis:/data"
|
||||
networks:
|
||||
- webnet
|
||||
|
||||
volumes:
|
||||
redis:
|
||||
driver: local
|
||||
|
||||
networks:
|
||||
webnet:
|
93
experiment/db/bin/mysql_main.dart
Normal file
93
experiment/db/bin/mysql_main.dart
Normal file
|
@ -0,0 +1,93 @@
|
|||
import 'dart:io';
|
||||
|
||||
import 'package:mysql_client/mysql_client.dart';
|
||||
import 'package:mysql1/mysql1.dart';
|
||||
|
||||
void main() async {
|
||||
print("=== Start 'mysql1' driver test");
|
||||
await testMySQL1Driver().catchError((error, stackTrace) {
|
||||
print(error);
|
||||
});
|
||||
print("=== End test");
|
||||
print(" ");
|
||||
|
||||
print("=== Start 'mysql_client' driver test");
|
||||
await testMySQLClientDriver().catchError((error, stackTrace) {
|
||||
print(error);
|
||||
});
|
||||
print("=== End test");
|
||||
print(" ");
|
||||
|
||||
//sleep(Duration(seconds: 5));
|
||||
exit(0);
|
||||
}
|
||||
|
||||
Future<void> testMySQLClientDriver() async {
|
||||
var connection = await MySQLConnection.createConnection(
|
||||
host: "localhost",
|
||||
port: 3306,
|
||||
databaseName: "orm_test",
|
||||
userName: "test",
|
||||
password: "test123",
|
||||
secure: false);
|
||||
await connection.connect(timeoutMs: 30000);
|
||||
|
||||
print(">Test Select All");
|
||||
var result = await connection.execute("SELECT * from users");
|
||||
print("Total records: ${result.rows.length}");
|
||||
|
||||
print(">Test Insert");
|
||||
var params = {
|
||||
"username": "test",
|
||||
"password": "test123",
|
||||
"email": "test@demo.com",
|
||||
"updatedAt": DateTime.parse("1970-01-01 00:00:00")
|
||||
};
|
||||
|
||||
result = await connection.execute(
|
||||
"INSERT INTO users (username, password, email, updated_at) VALUES (:username, :password, :email, :updatedAt)",
|
||||
params);
|
||||
print("Last inserted ID: ${result.lastInsertID}");
|
||||
|
||||
print(">Test Select By ID");
|
||||
result = await connection.execute(
|
||||
"SELECT * from users where id=:id", {"id": result.lastInsertID.toInt()});
|
||||
print("Read record: ${result.rows.first.assoc()}");
|
||||
}
|
||||
|
||||
Future<void> testMySQL1Driver() async {
|
||||
var settings = ConnectionSettings(
|
||||
host: 'localhost',
|
||||
port: 3306,
|
||||
db: 'orm_test',
|
||||
user: 'test',
|
||||
password: 'test123',
|
||||
timeout: Duration(seconds: 60));
|
||||
var connection = await MySqlConnection.connect(settings);
|
||||
|
||||
print(">Test Select All");
|
||||
var result = await connection.query("SELECT * from users");
|
||||
print("Total records: ${result.length}");
|
||||
|
||||
print(">Test Insert");
|
||||
var params = [
|
||||
"test",
|
||||
"test123",
|
||||
"test@demo.com",
|
||||
DateTime.parse("1970-01-01 00:00:00").toUtc()
|
||||
];
|
||||
|
||||
// DateTime.parse("1970-01-01 00:00:01").toUtc()
|
||||
result = await connection.query(
|
||||
"INSERT INTO users (username, password, email, updated_at) VALUES (?, ?, ?, ?)",
|
||||
params);
|
||||
print("Last inserted ID: ${result.insertId}");
|
||||
|
||||
print(">Test Select By ID");
|
||||
result = await connection
|
||||
.query("SELECT * from users where id=?", [result.insertId]);
|
||||
print("Read record: ${result.first.values}");
|
||||
|
||||
var d = DateTime.parse("1970-01-01 00:00:00").toUtc();
|
||||
print("Local time: ${d.toLocal()}");
|
||||
}
|
53
experiment/db/bin/pg_main.dart
Normal file
53
experiment/db/bin/pg_main.dart
Normal file
|
@ -0,0 +1,53 @@
|
|||
import 'dart:io';
|
||||
|
||||
import 'package:postgres/postgres.dart';
|
||||
|
||||
void main() async {
|
||||
/*
|
||||
* Granting permission in postgres
|
||||
* grant all privileges on database orm_test to test;
|
||||
* grant all privileges on sequence users_id_seq to test;
|
||||
*/
|
||||
print("=== Start 'postgres' driver test");
|
||||
await testPgDriver().catchError((error, stackTrace) {
|
||||
print(error);
|
||||
});
|
||||
print("=== End test");
|
||||
print(" ");
|
||||
|
||||
exit(0);
|
||||
}
|
||||
|
||||
Future<void> testPgDriver() async {
|
||||
var conn = PostgreSQLConnection('localhost', 5432, 'orm_test',
|
||||
username: 'test', password: 'test123');
|
||||
await conn.open();
|
||||
|
||||
print(">Test Select All");
|
||||
var result = await conn.query("SELECT * from users");
|
||||
print("Total records: ${result.length}");
|
||||
for (var row in result) {
|
||||
print(row[0]);
|
||||
for (var element in row) {
|
||||
print(element);
|
||||
}
|
||||
}
|
||||
|
||||
print(">Test Insert");
|
||||
var params = {
|
||||
"username": "test",
|
||||
"password": "test123",
|
||||
"email": "test@demo.com",
|
||||
"updatedAt": DateTime.parse("1970-01-01 00:00:00")
|
||||
};
|
||||
|
||||
result = await conn.query(
|
||||
"INSERT INTO users (username, password, email, updated_at) VALUES (@username, @password, @email, @updatedAt)",
|
||||
substitutionValues: params);
|
||||
//print("Last inserted ID: ${result.}");
|
||||
|
||||
//print(">Test Select By ID");
|
||||
//result = await conn.query("SELECT * from users where id=@id",
|
||||
// substitutionValues: {"id": result});
|
||||
//print("Read record: ${result.length}");
|
||||
}
|
14
experiment/db/pubspec.yaml
Normal file
14
experiment/db/pubspec.yaml
Normal file
|
@ -0,0 +1,14 @@
|
|||
name: performance_tool
|
||||
version: 1.0.0
|
||||
description: Angel3 performance testing tool
|
||||
publish_to: none
|
||||
environment:
|
||||
sdk: '>=2.16.0 <3.0.0'
|
||||
published_to: none
|
||||
dependencies:
|
||||
mysql1: ^0.20.0
|
||||
mysql_client: ^0.0.24
|
||||
postgres: ^2.4.1
|
||||
postgres_pool: ^2.1.3
|
||||
dev_dependencies:
|
||||
lints: ^1.0.0
|
6
experiment/logging/AUTHORS.md
Normal file
6
experiment/logging/AUTHORS.md
Normal file
|
@ -0,0 +1,6 @@
|
|||
Primary Authors
|
||||
===============
|
||||
|
||||
* __[Thomas Hii](dukefirehawk.apps@gmail.com)__
|
||||
|
||||
The current main maintainer of the code base.
|
29
experiment/logging/LICENSE
Normal file
29
experiment/logging/LICENSE
Normal file
|
@ -0,0 +1,29 @@
|
|||
BSD 3-Clause License
|
||||
|
||||
Copyright (c) 2021, dukefirehawk.com
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
1
experiment/logging/analysis_options.yaml
Normal file
1
experiment/logging/analysis_options.yaml
Normal file
|
@ -0,0 +1 @@
|
|||
include: package:lints/recommended.yaml
|
22
experiment/logging/bin/main.dart
Normal file
22
experiment/logging/bin/main.dart
Normal file
|
@ -0,0 +1,22 @@
|
|||
import 'dart:isolate';
|
||||
|
||||
Future<void> runApp(var message) async {
|
||||
var stopwatch = Stopwatch()..start();
|
||||
|
||||
// Do something
|
||||
|
||||
print('Execution($message) Time: ${stopwatch.elapsed.inMilliseconds}ms');
|
||||
stopwatch.stop();
|
||||
}
|
||||
|
||||
void main() async {
|
||||
var concurrency = 6000;
|
||||
|
||||
for (var i = 0; i < concurrency; i++) {
|
||||
Isolate.spawn(runApp, 'Instance_$i');
|
||||
}
|
||||
|
||||
await Future.delayed(const Duration(seconds: 10));
|
||||
|
||||
//print("Exit");
|
||||
}
|
6
experiment/performance/AUTHORS.md
Normal file
6
experiment/performance/AUTHORS.md
Normal file
|
@ -0,0 +1,6 @@
|
|||
Primary Authors
|
||||
===============
|
||||
|
||||
* __[Thomas Hii](dukefirehawk.apps@gmail.com)__
|
||||
|
||||
The current main maintainer of the code base.
|
29
experiment/performance/LICENSE
Normal file
29
experiment/performance/LICENSE
Normal file
|
@ -0,0 +1,29 @@
|
|||
BSD 3-Clause License
|
||||
|
||||
Copyright (c) 2021, dukefirehawk.com
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
1
experiment/performance/analysis_options.yaml
Normal file
1
experiment/performance/analysis_options.yaml
Normal file
|
@ -0,0 +1 @@
|
|||
include: package:lints/recommended.yaml
|
11
experiment/performance/pubspec.yaml
Normal file
11
experiment/performance/pubspec.yaml
Normal file
|
@ -0,0 +1,11 @@
|
|||
name: performance_tool
|
||||
version: 1.0.0
|
||||
description: Angel3 performance testing tool
|
||||
publish_to: none
|
||||
environment:
|
||||
sdk: '>=2.16.0 <3.0.0'
|
||||
published_to: none
|
||||
dependencies:
|
||||
http: ^0.13.4
|
||||
dev_dependencies:
|
||||
lints: ^1.0.0
|
|
@ -15,4 +15,4 @@ dependencies:
|
|||
dart_twitter_api: ^0.5.6+1
|
||||
dev_dependencies:
|
||||
logging: ^1.0.0
|
||||
lints: ^2.0.0
|
||||
lints: ^1.0.0
|
||||
|
|
|
@ -291,15 +291,23 @@ abstract class Driver<
|
|||
Future sendResponse(Request request, Response response, RequestContext req,
|
||||
ResponseContext res,
|
||||
{bool ignoreFinalizers = false}) {
|
||||
//app.logger.fine("Calling SendResponse");
|
||||
Future<void> _cleanup(_) {
|
||||
if (!app.environment.isProduction && req.container!.has<Stopwatch>()) {
|
||||
var sw = req.container!.make<Stopwatch>();
|
||||
app.logger.info(
|
||||
app.logger.fine(
|
||||
"${res.statusCode} ${req.method} ${req.uri} (${sw.elapsedMilliseconds} ms)");
|
||||
}
|
||||
return req.close();
|
||||
}
|
||||
|
||||
// TODO: Debugging header
|
||||
/*
|
||||
for (var key in res.headers.keys) {
|
||||
app.logger.fine("Response header key: $key");
|
||||
}
|
||||
*/
|
||||
|
||||
if (!res.isBuffered) {
|
||||
//if (res.isOpen) {
|
||||
return res.close().then(_cleanup);
|
||||
|
@ -307,6 +315,8 @@ abstract class Driver<
|
|||
//return Future.value();
|
||||
}
|
||||
|
||||
//app.logger.fine("Calling finalizers");
|
||||
|
||||
var finalizers = ignoreFinalizers == true
|
||||
? Future.value()
|
||||
: Future.forEach(app.responseFinalizers, (dynamic f) => f(req, res));
|
||||
|
@ -315,6 +325,7 @@ abstract class Driver<
|
|||
//if (res.isOpen) res.close();
|
||||
|
||||
for (var key in res.headers.keys) {
|
||||
app.logger.fine("Response header key: $key");
|
||||
setHeader(response, key, res.headers[key] ?? '');
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,9 @@
|
|||
# Change Log
|
||||
|
||||
## 6.1.0
|
||||
|
||||
* Updated temporal fields
|
||||
|
||||
## 6.0.0
|
||||
|
||||
* Updated to SDK 2.16.x
|
||||
|
|
|
@ -5,10 +5,20 @@
|
|||
[![Gitter](https://img.shields.io/gitter/room/angel_dart/discussion)](https://gitter.im/angel_dart/discussion)
|
||||
[![License](https://img.shields.io/github/license/dukefirehawk/angel)](https://github.com/dukefirehawk/angel/tree/master/packages/orm/angel_migration/LICENSE)
|
||||
|
||||
A database migration framework built for Angel3 ORM.
|
||||
A basic database migration framework built for Angel3 ORM.
|
||||
|
||||
## Supported database
|
||||
|
||||
* PostgreSQL version 10 or later
|
||||
* MariaDB 10.2.x
|
||||
* MySQL 8.x
|
||||
* MariaDB 10.2.x or later
|
||||
* MySQL 8.x or later
|
||||
|
||||
## Features
|
||||
|
||||
* Create tables based on ORM models
|
||||
* Drop tables based on ORM models
|
||||
* Add new tables based ORM models
|
||||
|
||||
## Limitation
|
||||
|
||||
* Alter table/fields based on updated ORM models not supported
|
||||
|
|
|
@ -12,6 +12,7 @@ class MigrationColumn extends Column {
|
|||
@override
|
||||
IndexType get indexType => _index;
|
||||
|
||||
@override
|
||||
dynamic get defaultValue => _defaultValue;
|
||||
|
||||
List<MigrationColumnReference> get externalReferences =>
|
||||
|
@ -21,8 +22,12 @@ class MigrationColumn extends Column {
|
|||
{bool isNullable = true,
|
||||
int length = 255,
|
||||
IndexType indexType = IndexType.standardIndex,
|
||||
defaultValue})
|
||||
: super(type: type, length: length) {
|
||||
dynamic defaultValue})
|
||||
: super(
|
||||
type: type,
|
||||
length: length,
|
||||
isNullable: isNullable,
|
||||
defaultValue: defaultValue) {
|
||||
_nullable = isNullable;
|
||||
_index = indexType;
|
||||
_defaultValue = defaultValue;
|
||||
|
|
|
@ -13,7 +13,11 @@ abstract class Table {
|
|||
|
||||
MigrationColumn float(String name) => declare(name, ColumnType.float);
|
||||
|
||||
MigrationColumn numeric(String name) => declare(name, ColumnType.numeric);
|
||||
MigrationColumn double(String name) => declare(name, ColumnType.double);
|
||||
|
||||
MigrationColumn numeric(String name, {int precision = 17, int scale = 3}) {
|
||||
return declare(name, ColumnType.numeric);
|
||||
}
|
||||
|
||||
MigrationColumn boolean(String name) => declare(name, ColumnType.boolean);
|
||||
|
||||
|
@ -23,7 +27,9 @@ abstract class Table {
|
|||
//MigrationColumn dateTime(String name) => timeStamp(name, timezone: true);
|
||||
|
||||
MigrationColumn timeStamp(String name, {bool timezone = false}) {
|
||||
if (timezone != true) return declare(name, ColumnType.timeStamp);
|
||||
if (!timezone) {
|
||||
return declare(name, ColumnType.timeStamp);
|
||||
}
|
||||
return declare(name, ColumnType.timeStampWithTimeZone);
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
name: angel3_migration
|
||||
version: 6.0.0
|
||||
version: 6.1.0
|
||||
description: Database migration runtime for Angel3 ORM. Use this package to define schemas.
|
||||
homepage: https://angel3-framework.web.app/
|
||||
repository: https://github.com/dukefirehawk/angel/tree/master/packages/orm/angel_migration
|
||||
|
@ -9,6 +9,6 @@ dependencies:
|
|||
angel3_orm: ^6.0.0
|
||||
dev_dependencies:
|
||||
lints: ^1.0.0
|
||||
# dependency_overrides:
|
||||
# angel3_orm:
|
||||
# path: ../angel_orm
|
||||
dependency_overrides:
|
||||
angel3_orm:
|
||||
path: ../angel_orm
|
||||
|
|
|
@ -1,5 +1,12 @@
|
|||
# Change Log
|
||||
|
||||
## 6.1.0
|
||||
|
||||
* Fixed issue #70. Incorrectly generated SQL for `defaultsTo('Default Value')`
|
||||
* Mapped timestamp to datetime for MySQL and MariaDB
|
||||
* Fixed `MariaDbMigrationRunner` to work with MariaDB
|
||||
* Fixed `MySqlMigrationRunner` to work with MySQL and MariaDB
|
||||
|
||||
## 6.0.1
|
||||
|
||||
* Added `MariaDbMigrationRunner` to support MariaDB migration with `mysql1` driver
|
||||
|
|
|
@ -5,20 +5,24 @@
|
|||
[![Gitter](https://img.shields.io/gitter/room/angel_dart/discussion)](https://gitter.im/angel_dart/discussion)
|
||||
[![License](https://img.shields.io/github/license/dukefirehawk/angel)](https://github.com/dukefirehawk/angel/tree/master/packages/orm/angel_migration_runner/LICENSE)
|
||||
|
||||
Command-line based database migration runner for Angel3 ORM.
|
||||
Database migration runner for Angel3 ORM.
|
||||
|
||||
Supported database:
|
||||
|
||||
* PostgreSQL version 10 or later
|
||||
* MariaDB 10.2.x or later
|
||||
* MySQL 8.x or later
|
||||
* PostgreSQL 10.x or greater
|
||||
* MariaDB 10.2.x or greater
|
||||
* MySQL 8.x or greater
|
||||
|
||||
## Usage
|
||||
|
||||
* For PostgreSQL, use `PostgresMigrationRunner` to perform the database migration.
|
||||
* Use `PostgresMigrationRunner` to perform database migration for PostgreSQL.
|
||||
|
||||
* For MariaDB, use `MariaDbMigrationRunner` to perform the database migration.
|
||||
* Use `MySqlMigrationRunner` to perform database migration for MySQL and MariaDB. This runner is using [`mysql_client`](https://pub.dev/packages?q=mysql_client) driver.
|
||||
|
||||
* For MySQL, use `MySqlMigrationRunner` to perform the database migration.
|
||||
* Use `MariaDbMigrationRunner` to perform database migration for MariaDB. This runner is using[`mysql1`](https://pub.dev/packages?q=mysql1) driver.
|
||||
|
||||
**Important Notes** For MariaDB and MySQL, both migration runner are using different drivers. MariaDB is using `mysql1` driver while MySQL is using `mysql_client` driver. This is necessary as neither driver works correctly over both MariaDB and MySQL. Based on testing, `mysql1` driver works seamlessly with MariaDB 10.2.x while `mysql_client` works well with MySQL 8.x.
|
||||
## Supported Operations
|
||||
|
||||
* reset - Clear out all records in the `migrations` table and drop all the managed ORM tables.
|
||||
* up - Generate all the managed ORM tables based on the ORM models.
|
||||
* refresh - Run `reset` follow by `up`
|
||||
|
|
|
@ -45,7 +45,7 @@ class MariaDbMigrationRunner implements MigrationRunner {
|
|||
PRIMARY KEY(id)
|
||||
);
|
||||
''').then((result) {
|
||||
_log.info('Check and create "migrations" table');
|
||||
_log.fine('Check and create "migrations" table');
|
||||
}).catchError((e) {
|
||||
_log.severe('Failed to create "migrations" table.', e);
|
||||
});
|
||||
|
@ -54,18 +54,25 @@ class MariaDbMigrationRunner implements MigrationRunner {
|
|||
@override
|
||||
Future up() async {
|
||||
await _init();
|
||||
var r = await connection.query('SELECT path from migrations;');
|
||||
var existing = r.expand((x) => x).cast<String>();
|
||||
var toRun = <String>[];
|
||||
var result = await connection.query('SELECT path from migrations;');
|
||||
|
||||
var existing = <String>[];
|
||||
if (result.isNotEmpty) {
|
||||
existing = result.expand((x) => x).cast<String>().toList();
|
||||
}
|
||||
|
||||
var toRun = <String>[];
|
||||
migrations.forEach((k, v) {
|
||||
if (!existing.contains(k)) toRun.add(k);
|
||||
});
|
||||
|
||||
if (toRun.isNotEmpty) {
|
||||
var r = await connection.query('SELECT MAX(batch) from migrations;');
|
||||
var rTmp = r.toList();
|
||||
var curBatch = int.tryParse(rTmp[0][0] ?? '0') as int;
|
||||
var result = await connection.query('SELECT MAX(batch) from migrations;');
|
||||
var curBatch = 0;
|
||||
if (result.isNotEmpty) {
|
||||
var firstRow = result.toList();
|
||||
curBatch = int.tryParse(firstRow[0][0] ?? '0') as int;
|
||||
}
|
||||
var batch = curBatch + 1;
|
||||
|
||||
for (var k in toRun) {
|
||||
|
@ -73,11 +80,10 @@ class MariaDbMigrationRunner implements MigrationRunner {
|
|||
var schema = MariaDbSchema();
|
||||
migration.up(schema);
|
||||
_log.info('Added "$k" into "migrations" table.');
|
||||
|
||||
try {
|
||||
await schema.run(connection).then((_) async {
|
||||
var result = await connection.query(
|
||||
"INSERT INTO MIGRATIONS (batch, path) VALUES ($batch, '$k')");
|
||||
"INSERT INTO migrations (batch, path) VALUES ($batch, '$k')");
|
||||
|
||||
return result.affectedRows;
|
||||
});
|
||||
|
@ -94,13 +100,21 @@ class MariaDbMigrationRunner implements MigrationRunner {
|
|||
Future rollback() async {
|
||||
await _init();
|
||||
|
||||
var r = await connection.query('SELECT MAX(batch) from migrations;');
|
||||
var rTmp = r.toList();
|
||||
var curBatch = int.tryParse(rTmp[0][0] ?? 0) as int;
|
||||
var result = await connection.query('SELECT MAX(batch) from migrations;');
|
||||
|
||||
r = await connection
|
||||
var curBatch = 0;
|
||||
if (result.isNotEmpty) {
|
||||
var firstRow = result.toList();
|
||||
curBatch = int.tryParse(firstRow[0][0]) as int;
|
||||
}
|
||||
|
||||
result = await connection
|
||||
.query('SELECT path from migrations WHERE batch = $curBatch;');
|
||||
var existing = r.expand((x) => x).cast<String>();
|
||||
var existing = <String>[];
|
||||
if (result.isNotEmpty) {
|
||||
existing = result.expand((x) => x).cast<String>().toList();
|
||||
}
|
||||
|
||||
var toRun = <String>[];
|
||||
|
||||
migrations.forEach((k, v) {
|
||||
|
@ -128,7 +142,11 @@ class MariaDbMigrationRunner implements MigrationRunner {
|
|||
await _init();
|
||||
var r = await connection
|
||||
.query('SELECT path from migrations ORDER BY batch DESC;');
|
||||
var existing = r.expand((x) => x).cast<String>();
|
||||
var existing = <String>[];
|
||||
if (r.isNotEmpty) {
|
||||
existing = r.expand((x) => x).cast<String>().toList();
|
||||
}
|
||||
|
||||
var toRun = existing.where(migrations.containsKey).toList();
|
||||
|
||||
if (toRun.isNotEmpty) {
|
||||
|
|
|
@ -23,7 +23,7 @@ class MariaDbSchema extends Schema {
|
|||
_log.severe('Failed to run query: [ $sql ]', e);
|
||||
throw e;
|
||||
});
|
||||
affectedRows = result?.affectedRows ?? 0;
|
||||
affectedRows = result.affectedRows ?? 0;
|
||||
});
|
||||
|
||||
return affectedRows;
|
||||
|
@ -42,7 +42,7 @@ class MariaDbSchema extends Schema {
|
|||
@override
|
||||
void drop(String tableName, {bool cascade = false}) {
|
||||
var c = cascade == true ? ' CASCADE' : '';
|
||||
_writeln('DROP TABLE "$tableName"$c;');
|
||||
_writeln('DROP TABLE $tableName$c;');
|
||||
}
|
||||
|
||||
@override
|
||||
|
|
|
@ -6,7 +6,12 @@ import 'package:charcode/ascii.dart';
|
|||
abstract class MariaDbGenerator {
|
||||
static String columnType(MigrationColumn column) {
|
||||
var str = column.type.name;
|
||||
if (column.type.hasSize) {
|
||||
|
||||
// Map timestamp time to datetime
|
||||
if (column.type == ColumnType.timeStamp) {
|
||||
str = ColumnType.dateTime.name;
|
||||
}
|
||||
if (column.type.hasLength) {
|
||||
return '$str(${column.length})';
|
||||
} else {
|
||||
return str;
|
||||
|
@ -46,7 +51,7 @@ abstract class MariaDbGenerator {
|
|||
}
|
||||
|
||||
for (var ref in column.externalReferences) {
|
||||
buf.write(' ' + compileReference(ref));
|
||||
buf.write(' ${compileReference(ref)}');
|
||||
}
|
||||
|
||||
return buf.toString();
|
||||
|
@ -54,7 +59,7 @@ abstract class MariaDbGenerator {
|
|||
|
||||
static String compileReference(MigrationColumnReference ref) {
|
||||
var buf = StringBuffer('REFERENCES ${ref.foreignTable}(${ref.foreignKey})');
|
||||
if (ref.behavior != null) buf.write(' ' + ref.behavior!);
|
||||
if (ref.behavior != null) buf.write(' ${ref.behavior!}');
|
||||
return buf.toString();
|
||||
}
|
||||
}
|
||||
|
@ -147,8 +152,8 @@ class MariaDbAlterTable extends Table implements MutableTable {
|
|||
|
||||
@override
|
||||
void changeColumnType(String name, ColumnType type, {int length = 256}) {
|
||||
_stack.add('ALTER COLUMN $name TYPE ' +
|
||||
MariaDbGenerator.columnType(MigrationColumn(type, length: length)));
|
||||
_stack.add(
|
||||
'ALTER COLUMN $name TYPE ${MariaDbGenerator.columnType(MigrationColumn(type, length: length))}');
|
||||
}
|
||||
|
||||
@override
|
||||
|
|
|
@ -54,8 +54,11 @@ class MySqlMigrationRunner implements MigrationRunner {
|
|||
@override
|
||||
Future up() async {
|
||||
await _init();
|
||||
var r = await connection.execute('SELECT path from migrations;');
|
||||
var existing = r.rows.cast<String>(); //.expand((x) => x).cast<String>();
|
||||
var result = await connection.execute('SELECT path from migrations;');
|
||||
var existing = <String>[];
|
||||
if (result.rows.isNotEmpty) {
|
||||
existing = result.rows.first.assoc().values.cast<String>().toList();
|
||||
}
|
||||
var toRun = <String>[];
|
||||
|
||||
migrations.forEach((k, v) {
|
||||
|
@ -63,11 +66,14 @@ class MySqlMigrationRunner implements MigrationRunner {
|
|||
});
|
||||
|
||||
if (toRun.isNotEmpty) {
|
||||
var r = await connection.execute('SELECT MAX(batch) from migrations;');
|
||||
var rTmp = r.rows.first; //r.toList();
|
||||
var curBatch =
|
||||
int.tryParse(rTmp.colAt(0) ?? "0") ?? 0; //(rTmp[0][0] ?? 0) as int;
|
||||
var batch = curBatch + 1;
|
||||
var result =
|
||||
await connection.execute('SELECT MAX(batch) from migrations;');
|
||||
var curBatch = 0;
|
||||
if (result.rows.isNotEmpty) {
|
||||
var firstRow = result.rows.first;
|
||||
curBatch = int.tryParse(firstRow.colAt(0) ?? "0") ?? 0;
|
||||
}
|
||||
curBatch++;
|
||||
|
||||
for (var k in toRun) {
|
||||
var migration = migrations[k]!;
|
||||
|
@ -77,16 +83,13 @@ class MySqlMigrationRunner implements MigrationRunner {
|
|||
await schema.run(connection).then((_) async {
|
||||
var result = await connection
|
||||
.execute(
|
||||
"INSERT INTO MIGRATIONS (batch, path) VALUES ($batch, '$k')")
|
||||
"INSERT INTO migrations (batch, path) VALUES ($curBatch, '$k')")
|
||||
.catchError((e) {
|
||||
_log.severe('Failed to insert into "migrations" table.', e);
|
||||
});
|
||||
|
||||
return result.affectedRows.toInt();
|
||||
});
|
||||
//return connection.execute(
|
||||
// 'INSERT INTO MIGRATIONS (batch, path) VALUES ($batch, \'$k\');');
|
||||
|
||||
}
|
||||
} else {
|
||||
_log.warning('Nothing to add into "migrations" table.');
|
||||
|
@ -97,14 +100,18 @@ class MySqlMigrationRunner implements MigrationRunner {
|
|||
Future rollback() async {
|
||||
await _init();
|
||||
|
||||
var r = await connection.execute('SELECT MAX(batch) from migrations;');
|
||||
var rTmp = r.rows.first; //r.toList();
|
||||
var curBatch =
|
||||
int.tryParse(rTmp.colAt(0) ?? "0") ?? 0; //(rTmp[0][0] ?? 0) as int;
|
||||
|
||||
r = await connection
|
||||
var result = await connection.execute('SELECT MAX(batch) from migrations;');
|
||||
var curBatch = 0;
|
||||
if (result.rows.isNotEmpty) {
|
||||
var firstRow = result.rows.first;
|
||||
curBatch = int.tryParse(firstRow.colAt(0) ?? "0") ?? 0;
|
||||
}
|
||||
result = await connection
|
||||
.execute('SELECT path from migrations WHERE batch = $curBatch;');
|
||||
var existing = r.rows.cast<String>(); //r.expand((x) => x).cast<String>();
|
||||
var existing = <String>[];
|
||||
if (result.rows.isNotEmpty) {
|
||||
existing = result.rows.first.assoc().values.cast<String>().toList();
|
||||
}
|
||||
var toRun = <String>[];
|
||||
|
||||
migrations.forEach((k, v) {
|
||||
|
@ -130,9 +137,13 @@ class MySqlMigrationRunner implements MigrationRunner {
|
|||
@override
|
||||
Future reset() async {
|
||||
await _init();
|
||||
var r = await connection
|
||||
var result = await connection
|
||||
.execute('SELECT path from migrations ORDER BY batch DESC;');
|
||||
var existing = r.rows.cast<String>(); //r.expand((x) => x).cast<String>();
|
||||
var existing = <String>[];
|
||||
if (result.rows.isNotEmpty) {
|
||||
var firstRow = result.rows.first;
|
||||
existing = firstRow.assoc().values.cast<String>().toList();
|
||||
}
|
||||
var toRun = existing.where(migrations.containsKey).toList();
|
||||
|
||||
if (toRun.isNotEmpty) {
|
||||
|
|
|
@ -6,7 +6,11 @@ import 'package:charcode/ascii.dart';
|
|||
abstract class MySqlGenerator {
|
||||
static String columnType(MigrationColumn column) {
|
||||
var str = column.type.name;
|
||||
if (column.type.hasSize) {
|
||||
// Map timestamp time to datetime
|
||||
if (column.type == ColumnType.timeStamp) {
|
||||
str = ColumnType.dateTime.name;
|
||||
}
|
||||
if (column.type.hasLength) {
|
||||
return '$str(${column.length})';
|
||||
} else {
|
||||
return str;
|
||||
|
@ -46,7 +50,7 @@ abstract class MySqlGenerator {
|
|||
}
|
||||
|
||||
for (var ref in column.externalReferences) {
|
||||
buf.write(' ' + compileReference(ref));
|
||||
buf.write(' ${compileReference(ref)}');
|
||||
}
|
||||
|
||||
return buf.toString();
|
||||
|
@ -54,7 +58,7 @@ abstract class MySqlGenerator {
|
|||
|
||||
static String compileReference(MigrationColumnReference ref) {
|
||||
var buf = StringBuffer('REFERENCES ${ref.foreignTable}(${ref.foreignKey})');
|
||||
if (ref.behavior != null) buf.write(' ' + ref.behavior!);
|
||||
if (ref.behavior != null) buf.write(' ${ref.behavior!}');
|
||||
return buf.toString();
|
||||
}
|
||||
}
|
||||
|
@ -147,8 +151,8 @@ class MysqlAlterTable extends Table implements MutableTable {
|
|||
|
||||
@override
|
||||
void changeColumnType(String name, ColumnType type, {int length = 256}) {
|
||||
_stack.add('ALTER COLUMN $name TYPE ' +
|
||||
MySqlGenerator.columnType(MigrationColumn(type, length: length)));
|
||||
_stack.add(
|
||||
'ALTER COLUMN $name TYPE ${MySqlGenerator.columnType(MigrationColumn(type, length: length))}');
|
||||
}
|
||||
|
||||
@override
|
||||
|
|
|
@ -6,7 +6,7 @@ import 'package:charcode/ascii.dart';
|
|||
abstract class PostgresGenerator {
|
||||
static String columnType(MigrationColumn column) {
|
||||
var str = column.type.name;
|
||||
if (column.type.hasSize) {
|
||||
if (column.type.hasLength) {
|
||||
return '$str(${column.length})';
|
||||
} else {
|
||||
return str;
|
||||
|
@ -31,11 +31,10 @@ abstract class PostgresGenerator {
|
|||
b.writeCharCode(ch);
|
||||
}
|
||||
}
|
||||
s = b.toString();
|
||||
s = '\'${b.toString()}\'';
|
||||
} else {
|
||||
s = value.toString();
|
||||
}
|
||||
|
||||
buf.write(' DEFAULT $s');
|
||||
}
|
||||
|
||||
|
@ -46,7 +45,7 @@ abstract class PostgresGenerator {
|
|||
}
|
||||
|
||||
for (var ref in column.externalReferences) {
|
||||
buf.write(' ' + compileReference(ref));
|
||||
buf.write(' ${compileReference(ref)}');
|
||||
}
|
||||
|
||||
return buf.toString();
|
||||
|
@ -55,7 +54,7 @@ abstract class PostgresGenerator {
|
|||
static String compileReference(MigrationColumnReference ref) {
|
||||
var buf =
|
||||
StringBuffer('REFERENCES "${ref.foreignTable}"("${ref.foreignKey}")');
|
||||
if (ref.behavior != null) buf.write(' ' + ref.behavior!);
|
||||
if (ref.behavior != null) buf.write(' ${ref.behavior!}');
|
||||
return buf.toString();
|
||||
}
|
||||
}
|
||||
|
@ -147,9 +146,9 @@ class PostgresAlterTable extends Table implements MutableTable {
|
|||
}
|
||||
|
||||
@override
|
||||
void changeColumnType(String name, ColumnType type, {int length = 256}) {
|
||||
_stack.add('ALTER COLUMN "$name" TYPE ' +
|
||||
PostgresGenerator.columnType(MigrationColumn(type, length: length)));
|
||||
void changeColumnType(String name, ColumnType type, {int length = 255}) {
|
||||
_stack.add(
|
||||
'ALTER COLUMN "$name" TYPE ${PostgresGenerator.columnType(MigrationColumn(type, length: length))}');
|
||||
}
|
||||
|
||||
@override
|
||||
|
|
|
@ -18,5 +18,5 @@ Future<String> absoluteSourcePath(Type type) async {
|
|||
}
|
||||
}
|
||||
|
||||
return uri.toFilePath() + '#' + MirrorSystem.getName(mirror.simpleName);
|
||||
return '${uri.toFilePath()}#${MirrorSystem.getName(mirror.simpleName)}';
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
name: angel3_migration_runner
|
||||
version: 6.0.1
|
||||
version: 6.1.0
|
||||
description: Command-line based database migration runner for Angel3's ORM.
|
||||
homepage: https://angel3-framework.web.app/
|
||||
repository: https://github.com/dukefirehawk/angel/tree/master/packages/orm/angel_migration_runner
|
||||
|
@ -11,13 +11,13 @@ dependencies:
|
|||
args: ^2.1.0
|
||||
charcode: ^1.2.0
|
||||
postgres: ^2.4.0
|
||||
mysql_client: ^0.0.15
|
||||
mysql1: ^0.19.0
|
||||
mysql_client: ^0.0.24
|
||||
mysql1: ^0.20.0
|
||||
logging: ^1.0.0
|
||||
dev_dependencies:
|
||||
lints: ^1.0.0
|
||||
# dependency_overrides:
|
||||
# angel3_orm:
|
||||
# path: ../angel_orm
|
||||
# angel3_migration:
|
||||
# path: ../angel_migration
|
||||
dependency_overrides:
|
||||
angel3_orm:
|
||||
path: ../angel_orm
|
||||
angel3_migration:
|
||||
path: ../angel_migration
|
||||
|
|
|
@ -1,5 +1,10 @@
|
|||
# Change Log
|
||||
|
||||
## 6.1.0
|
||||
|
||||
* Fixed issue #68: Support for non-nullable type
|
||||
* Added `defaultValue` to `@Column` annotation
|
||||
|
||||
## 6.0.1
|
||||
|
||||
* Added `mapToDateTime`
|
||||
|
|
|
@ -18,11 +18,20 @@ class Column {
|
|||
final bool isNullable;
|
||||
|
||||
/// Specifies this column name.
|
||||
final String name;
|
||||
final String? name;
|
||||
|
||||
/// Specifies the length of a `VARCHAR`.
|
||||
final int length;
|
||||
|
||||
/// Specifies the precision of a `NUMERIC` or `DECIMAL`.
|
||||
final int precision;
|
||||
|
||||
/// Specifies the scale of a `NUMERIC` or `DECIMAL`.
|
||||
final int scale;
|
||||
|
||||
/// Specifies the timezone for temporal field.
|
||||
final String? timezone;
|
||||
|
||||
/// Explicitly defines a SQL type for this column.
|
||||
final ColumnType type;
|
||||
|
||||
|
@ -32,13 +41,20 @@ class Column {
|
|||
/// A custom SQL expression to execute, instead of a named column.
|
||||
final String? expression;
|
||||
|
||||
/// Specifies the default values.
|
||||
final dynamic defaultValue;
|
||||
|
||||
const Column(
|
||||
{this.isNullable = true,
|
||||
this.length = 255,
|
||||
this.name = "",
|
||||
this.precision = 17,
|
||||
this.scale = 3,
|
||||
this.name,
|
||||
this.timezone,
|
||||
this.type = ColumnType.varChar,
|
||||
this.indexType = IndexType.none,
|
||||
this.expression});
|
||||
this.expression,
|
||||
this.defaultValue});
|
||||
|
||||
/// Returns `true` if [expression] is not `null`.
|
||||
bool get hasExpression => expression != null;
|
||||
|
@ -71,9 +87,16 @@ enum IndexType {
|
|||
class ColumnType {
|
||||
/// The name of this data type.
|
||||
final String name;
|
||||
final bool hasSize;
|
||||
final bool hasLength;
|
||||
final bool hasPrecision;
|
||||
final bool hasScale;
|
||||
final bool hasTimezone;
|
||||
|
||||
const ColumnType(this.name, [this.hasSize = false]);
|
||||
const ColumnType(this.name,
|
||||
{this.hasLength = false,
|
||||
this.hasPrecision = false,
|
||||
this.hasScale = false,
|
||||
this.hasTimezone = false});
|
||||
|
||||
static const ColumnType boolean = ColumnType('boolean');
|
||||
|
||||
|
@ -87,12 +110,15 @@ class ColumnType {
|
|||
static const ColumnType smallInt = ColumnType('smallint');
|
||||
static const ColumnType tinyInt = ColumnType('tinyint');
|
||||
static const ColumnType bit = ColumnType('bit');
|
||||
static const ColumnType decimal = ColumnType('decimal', true);
|
||||
static const ColumnType numeric = ColumnType('numeric', true);
|
||||
static const ColumnType decimal =
|
||||
ColumnType('decimal', hasPrecision: true, hasScale: true);
|
||||
static const ColumnType numeric =
|
||||
ColumnType('numeric', hasPrecision: true, hasScale: true);
|
||||
static const ColumnType money = ColumnType('money');
|
||||
static const ColumnType smallMoney = ColumnType('smallmoney');
|
||||
static const ColumnType float = ColumnType('float');
|
||||
static const ColumnType real = ColumnType('real');
|
||||
static const ColumnType double = ColumnType('double precision');
|
||||
|
||||
// Dates and times
|
||||
static const ColumnType dateTime = ColumnType('datetime');
|
||||
|
@ -101,35 +127,34 @@ class ColumnType {
|
|||
static const ColumnType time = ColumnType('time');
|
||||
static const ColumnType timeStamp = ColumnType('timestamp');
|
||||
static const ColumnType timeStampWithTimeZone =
|
||||
ColumnType('timestamp with time zone');
|
||||
ColumnType('timestamptz', hasTimezone: true);
|
||||
|
||||
// Strings
|
||||
static const ColumnType char = ColumnType('char', true);
|
||||
static const ColumnType varChar = ColumnType('varchar', true);
|
||||
static const ColumnType char = ColumnType('char', hasLength: true);
|
||||
static const ColumnType varChar = ColumnType('varchar', hasLength: true);
|
||||
static const ColumnType varCharMax = ColumnType('varchar(max)');
|
||||
static const ColumnType text = ColumnType('text', true);
|
||||
static const ColumnType text = ColumnType('text');
|
||||
|
||||
// Unicode strings
|
||||
static const ColumnType nChar = ColumnType('nchar', true);
|
||||
static const ColumnType nVarChar = ColumnType('nvarchar', true);
|
||||
static const ColumnType nVarCharMax = ColumnType('nvarchar(max)', true);
|
||||
static const ColumnType nText = ColumnType('ntext', true);
|
||||
static const ColumnType nChar = ColumnType('nchar', hasLength: true);
|
||||
static const ColumnType nVarChar = ColumnType('nvarchar', hasLength: true);
|
||||
static const ColumnType nVarCharMax = ColumnType('nvarchar(max)');
|
||||
static const ColumnType nText = ColumnType('ntext', hasLength: true);
|
||||
|
||||
// Binary
|
||||
static const ColumnType binary = ColumnType('binary', true);
|
||||
static const ColumnType varBinary = ColumnType('varbinary', true);
|
||||
static const ColumnType varBinaryMax = ColumnType('varbinary(max)', true);
|
||||
static const ColumnType image = ColumnType('image', true);
|
||||
static const ColumnType binary = ColumnType('binary');
|
||||
static const ColumnType varBinary = ColumnType('varbinary');
|
||||
static const ColumnType varBinaryMax = ColumnType('varbinary(max)');
|
||||
static const ColumnType image = ColumnType('image');
|
||||
|
||||
// JSON.
|
||||
static const ColumnType json = ColumnType('json', true);
|
||||
static const ColumnType jsonb = ColumnType('jsonb', true);
|
||||
static const ColumnType json = ColumnType('json');
|
||||
static const ColumnType jsonb = ColumnType('jsonb');
|
||||
|
||||
// Misc.
|
||||
static const ColumnType sqlVariant = ColumnType('sql_variant', true);
|
||||
static const ColumnType uniqueIdentifier =
|
||||
ColumnType('uniqueidentifier', true);
|
||||
static const ColumnType xml = ColumnType('xml', true);
|
||||
static const ColumnType cursor = ColumnType('cursor', true);
|
||||
static const ColumnType table = ColumnType('table', true);
|
||||
static const ColumnType sqlVariant = ColumnType('sql_variant');
|
||||
static const ColumnType uniqueIdentifier = ColumnType('uniqueidentifier');
|
||||
static const ColumnType xml = ColumnType('xml');
|
||||
static const ColumnType cursor = ColumnType('cursor');
|
||||
static const ColumnType table = ColumnType('table');
|
||||
}
|
||||
|
|
|
@ -371,13 +371,13 @@ abstract class Query<T, Where extends QueryWhere> extends QueryBase<T> {
|
|||
var insertion = values?.compileInsert(this, tableName);
|
||||
|
||||
if (insertion == '') {
|
||||
throw StateError('No values have been specified for update.');
|
||||
throw StateError('No values have been specified for insertion.');
|
||||
} else {
|
||||
var sql = compile({});
|
||||
var returningSql = '';
|
||||
if (executor.dialect is PostgreSQLDialect) {
|
||||
var returning = fields.map(adornWithTableName).join(', ');
|
||||
sql = 'WITH $tableName as ($insertion RETURNING $returning) ' + sql;
|
||||
sql = 'WITH $tableName as ($insertion RETURNING $returning) $sql';
|
||||
} else if (executor.dialect is MySQLDialect) {
|
||||
// Default to using 'id' as primary key in model
|
||||
if (fields.contains("id")) {
|
||||
|
@ -397,9 +397,14 @@ abstract class Query<T, Where extends QueryWhere> extends QueryBase<T> {
|
|||
return executor
|
||||
.query(tableName, sql, substitutionValues,
|
||||
returningQuery: returningSql)
|
||||
.then((it) {
|
||||
.then((result) {
|
||||
// Return SQL execution results
|
||||
return it.isEmpty ? Optional.empty() : deserialize(it.first);
|
||||
//if (result.isNotEmpty) {
|
||||
// for (var element in result.first) {
|
||||
// _log.fine("value: $element");
|
||||
// }
|
||||
//}
|
||||
return result.isEmpty ? Optional.empty() : deserialize(result.first);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -422,7 +427,7 @@ abstract class Query<T, Where extends QueryWhere> extends QueryBase<T> {
|
|||
var sql = compile({});
|
||||
var returningSql = '';
|
||||
if (executor.dialect is PostgreSQLDialect) {
|
||||
sql = 'WITH $tableName as ($updateSql RETURNING $returning) ' + sql;
|
||||
sql = 'WITH $tableName as ($updateSql RETURNING $returning) $sql';
|
||||
} else if (executor.dialect is MySQLDialect) {
|
||||
returningSql = sql;
|
||||
sql = '$updateSql';
|
||||
|
|
|
@ -16,7 +16,7 @@ abstract class QueryValues {
|
|||
|
||||
String compileInsert(Query query, String tableName) {
|
||||
var data = Map<String, dynamic>.from(toMap());
|
||||
var now = DateTime.now().toUtc();
|
||||
var now = DateTime.now();
|
||||
if (data.containsKey('created_at') && data['created_at'] == null) {
|
||||
data['created_at'] = now;
|
||||
}
|
||||
|
@ -71,7 +71,7 @@ abstract class QueryValues {
|
|||
if (data.isEmpty) {
|
||||
return '';
|
||||
}
|
||||
var now = DateTime.now().toUtc();
|
||||
var now = DateTime.now();
|
||||
if (data.containsKey('created_at') && data['created_at'] == null) {
|
||||
data.remove('created_at');
|
||||
}
|
||||
|
|
|
@ -2,6 +2,8 @@ import 'package:charcode/ascii.dart';
|
|||
|
||||
bool isAscii(int ch) => ch >= $nul && ch <= $del;
|
||||
|
||||
final DateTime defaultDate = DateTime.parse("1970-01-01 00:00:00");
|
||||
|
||||
bool mapToBool(dynamic value) {
|
||||
if (value is int) {
|
||||
return value != 0;
|
||||
|
@ -20,10 +22,20 @@ String mapToText(dynamic value) {
|
|||
return value;
|
||||
}
|
||||
|
||||
DateTime? mapToDateTime(dynamic value) {
|
||||
/// Helper method to convert dynamic value to DateTime.
|
||||
/// If null return January 1st, 1970 at 00:00:00 UTC as default
|
||||
DateTime mapToDateTime(dynamic value) {
|
||||
if (value == null) {
|
||||
return value;
|
||||
return defaultDate;
|
||||
}
|
||||
if (value is String) {
|
||||
return DateTime.tryParse(value) ?? defaultDate;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/// Helper method to convert dynamic value to nullable DateTime
|
||||
DateTime? mapToNullableDateTime(dynamic value) {
|
||||
if (value is String) {
|
||||
return DateTime.tryParse(value);
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
name: angel3_orm
|
||||
version: 6.0.1
|
||||
version: 6.1.0
|
||||
description: Runtime support for Angel3 ORM. Includes base classes for queries.
|
||||
homepage: https://angel3-framework.web.app/
|
||||
repository: https://github.com/dukefirehawk/angel/tree/master/packages/orm/angel_orm
|
||||
|
|
|
@ -1,5 +1,10 @@
|
|||
# Change Log
|
||||
|
||||
## 6.2.0
|
||||
|
||||
* Fixed issue #68: Support for non-nullable type
|
||||
* Generate default value based on `defaultValue` in the `@Column` annotation
|
||||
|
||||
## 6.1.0
|
||||
|
||||
* Updated to `analyzer` 4.x.x
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import 'dart:async';
|
||||
import 'package:analyzer/dart/constant/value.dart';
|
||||
import 'package:analyzer/dart/element/element.dart';
|
||||
import 'package:analyzer/dart/element/type.dart';
|
||||
import 'package:angel3_model/angel3_model.dart';
|
||||
|
@ -135,7 +136,9 @@ class MigrationGenerator extends GeneratorForAnnotation<Orm> {
|
|||
switch (col.type) {
|
||||
case ColumnType.varChar:
|
||||
methodName = 'varChar';
|
||||
named['length'] = literal(col.length);
|
||||
if (col.type.hasLength) {
|
||||
named['length'] = literal(col.length);
|
||||
}
|
||||
break;
|
||||
case ColumnType.serial:
|
||||
methodName = 'serial';
|
||||
|
@ -146,8 +149,12 @@ class MigrationGenerator extends GeneratorForAnnotation<Orm> {
|
|||
case ColumnType.float:
|
||||
methodName = 'float';
|
||||
break;
|
||||
case ColumnType.double:
|
||||
methodName = 'double';
|
||||
break;
|
||||
case ColumnType.numeric:
|
||||
methodName = 'numeric';
|
||||
if (col.type.hasPrecision) {}
|
||||
break;
|
||||
case ColumnType.boolean:
|
||||
methodName = 'boolean';
|
||||
|
@ -192,7 +199,20 @@ class MigrationGenerator extends GeneratorForAnnotation<Orm> {
|
|||
|
||||
var defaultValue = ctx.buildContext.defaults[name];
|
||||
|
||||
if (defaultValue != null && !defaultValue.isNull) {
|
||||
// Handle 'defaultValue' on Column annotation
|
||||
if (col.defaultValue != null) {
|
||||
var defaultCode =
|
||||
dartObjectToString(col.defaultValue as DartObject);
|
||||
|
||||
if (defaultCode != null) {
|
||||
Expression defaultExpr = CodeExpression(
|
||||
Code(defaultCode),
|
||||
);
|
||||
cascade.add(refer('defaultsTo').call([defaultExpr]));
|
||||
}
|
||||
|
||||
// Handle 'defaultValue' on SerializableField annotation
|
||||
} else if (defaultValue != null && !defaultValue.isNull) {
|
||||
var type = defaultValue.type;
|
||||
Expression? defaultExpr;
|
||||
|
||||
|
@ -213,9 +233,12 @@ class MigrationGenerator extends GeneratorForAnnotation<Orm> {
|
|||
// Definitely an analyzer issue.
|
||||
}
|
||||
} else {
|
||||
defaultExpr = CodeExpression(
|
||||
Code(dartObjectToString(defaultValue)!),
|
||||
);
|
||||
var defaultCode = dartObjectToString(defaultValue);
|
||||
if (defaultCode != null) {
|
||||
defaultExpr = CodeExpression(
|
||||
Code(defaultCode),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (defaultExpr != null) {
|
||||
|
@ -230,7 +253,7 @@ class MigrationGenerator extends GeneratorForAnnotation<Orm> {
|
|||
cascade.add(refer('unique').call([]));
|
||||
}
|
||||
|
||||
if (col.isNullable != true) {
|
||||
if (!col.isNullable) {
|
||||
cascade.add(refer('notNull').call([]));
|
||||
}
|
||||
|
||||
|
|
|
@ -105,10 +105,10 @@ Future<OrmBuildContext?> buildOrmContext(
|
|||
// Check for column annotation...
|
||||
var element = _findElement(field);
|
||||
var columnAnnotation = columnTypeChecker.firstAnnotationOf(element);
|
||||
// print('${element.name} => $columnAnnotation');
|
||||
|
||||
Column? column;
|
||||
if (columnAnnotation != null) {
|
||||
// print('[ORM_BUILD_CONTEXT] ${element.name} => $columnAnnotation');
|
||||
column = reviveColumn(ConstantReader(columnAnnotation));
|
||||
}
|
||||
|
||||
|
@ -130,6 +130,7 @@ Future<OrmBuildContext?> buildOrmContext(
|
|||
length: column.length,
|
||||
indexType: column.indexType,
|
||||
type: inferColumnType(field.type),
|
||||
defaultValue: column.defaultValue,
|
||||
);
|
||||
|
||||
// Try to find a relationship
|
||||
|
@ -319,6 +320,7 @@ Future<OrmBuildContext?> buildOrmContext(
|
|||
length: column.length,
|
||||
type: column.type,
|
||||
indexType: column.indexType,
|
||||
defaultValue: column.defaultValue,
|
||||
expression:
|
||||
ConstantReader(columnAnnotation).peek('expression')?.stringValue,
|
||||
);
|
||||
|
@ -334,6 +336,7 @@ Future<OrmBuildContext?> buildOrmContext(
|
|||
return ctx;
|
||||
}
|
||||
|
||||
// Detect and return the correct column type
|
||||
ColumnType inferColumnType(DartType type) {
|
||||
if (const TypeChecker.fromRuntime(String).isAssignableFromType(type)) {
|
||||
return ColumnType.varChar;
|
||||
|
@ -342,10 +345,10 @@ ColumnType inferColumnType(DartType type) {
|
|||
return ColumnType.int;
|
||||
}
|
||||
if (const TypeChecker.fromRuntime(double).isAssignableFromType(type)) {
|
||||
return ColumnType.decimal;
|
||||
return ColumnType.double;
|
||||
}
|
||||
if (const TypeChecker.fromRuntime(num).isAssignableFromType(type)) {
|
||||
return ColumnType.numeric;
|
||||
return ColumnType.float;
|
||||
}
|
||||
if (const TypeChecker.fromRuntime(bool).isAssignableFromType(type)) {
|
||||
return ColumnType.boolean;
|
||||
|
@ -393,6 +396,7 @@ Column reviveColumn(ConstantReader cr) {
|
|||
return Column(
|
||||
isNullable: cr.peek('isNullable')?.boolValue ?? false,
|
||||
length: cr.peek('length')?.intValue ?? 255,
|
||||
defaultValue: cr.peek('defaultValue')?.objectValue,
|
||||
type: columnType,
|
||||
indexType: indexType,
|
||||
);
|
||||
|
@ -401,6 +405,7 @@ Column reviveColumn(ConstantReader cr) {
|
|||
const TypeChecker relationshipTypeChecker =
|
||||
TypeChecker.fromRuntime(Relationship);
|
||||
|
||||
// ORM builder context
|
||||
class OrmBuildContext {
|
||||
final BuildContext buildContext;
|
||||
final Orm ormAnnotation;
|
||||
|
@ -426,7 +431,16 @@ class _ColumnType implements ColumnType {
|
|||
final String name;
|
||||
|
||||
@override
|
||||
final bool hasSize = false;
|
||||
bool hasLength = false;
|
||||
|
||||
@override
|
||||
bool hasPrecision = false;
|
||||
|
||||
@override
|
||||
bool hasScale = false;
|
||||
|
||||
@override
|
||||
bool hasTimezone = false;
|
||||
|
||||
//_ColumnType(this.name, [this.hasSize = false]);
|
||||
_ColumnType(this.name);
|
||||
|
|
|
@ -241,7 +241,7 @@ class OrmGenerator extends GeneratorForAnnotation<Orm> {
|
|||
..body = Block((b) {
|
||||
var i = 0;
|
||||
|
||||
// Build the argurments for model
|
||||
// Build the arguments for model
|
||||
var args = <String, Expression>{};
|
||||
for (var field in ctx.effectiveFields) {
|
||||
var fType = field.type;
|
||||
|
@ -274,16 +274,35 @@ class OrmGenerator extends GeneratorForAnnotation<Orm> {
|
|||
// Generated Code: mapToBool(row[i])
|
||||
expr = refer('mapToBool').call([expr]);
|
||||
} else if (fType.element?.displayName == 'DateTime') {
|
||||
//print("fType: ${fType.element?.displayName}");
|
||||
// Generated Code: mapToDateTime(row[i])
|
||||
expr = refer('mapToDateTime').call([expr]);
|
||||
if (fType.nullabilitySuffix == NullabilitySuffix.question) {
|
||||
expr = refer('mapToNullableDateTime').call([expr]);
|
||||
} else {
|
||||
expr = refer('mapToDateTime').call([expr]);
|
||||
}
|
||||
} else {
|
||||
// Generated Code: (row[i] as type?)
|
||||
expr = expr.asA(type);
|
||||
}
|
||||
|
||||
Expression defaultRef = refer('null');
|
||||
if (fType.nullabilitySuffix != NullabilitySuffix.question) {
|
||||
if (fType.isDartCoreString) {
|
||||
defaultRef = CodeExpression(Code('\'\''));
|
||||
} else if (fType.isDartCoreBool) {
|
||||
defaultRef = CodeExpression(Code('false'));
|
||||
} else if (fType.isDartCoreDouble) {
|
||||
defaultRef = CodeExpression(Code('0.0'));
|
||||
} else if (fType.isDartCoreInt || fType.isDartCoreNum) {
|
||||
defaultRef = CodeExpression(Code('0'));
|
||||
} else if (fType.element?.displayName == 'DateTime') {
|
||||
defaultRef = CodeExpression(
|
||||
Code('DateTime.parse("1970-01-01 00:00:00")'));
|
||||
}
|
||||
}
|
||||
expr = refer('fields').property('contains').call([
|
||||
literalString(ctx.buildContext.resolveFieldName(field.name)!)
|
||||
]).conditional(expr, refer('null'));
|
||||
]).conditional(expr, defaultRef);
|
||||
args[field.name] = expr;
|
||||
}
|
||||
|
||||
|
@ -481,7 +500,7 @@ class OrmGenerator extends GeneratorForAnnotation<Orm> {
|
|||
// , <user_fields>
|
||||
b.write(foreignFields.isEmpty
|
||||
? ''
|
||||
: ', ' + foreignFields.join(', '));
|
||||
: ', ${foreignFields.join(', ')}');
|
||||
// FROM users
|
||||
b.write(' FROM ');
|
||||
b.write(relationForeign.tableName);
|
||||
|
@ -535,9 +554,8 @@ class OrmGenerator extends GeneratorForAnnotation<Orm> {
|
|||
// There'll be a private `_field`, and then a getter, named `field`,
|
||||
// that returns the subquery object.
|
||||
|
||||
var foreignQueryType = refer(relationForeign
|
||||
.buildContext.modelClassNameRecase.pascalCase +
|
||||
'Query');
|
||||
var foreignQueryType = refer(
|
||||
'${relationForeign.buildContext.modelClassNameRecase.pascalCase}Query');
|
||||
|
||||
clazz
|
||||
..fields.add(Field((b) => b
|
||||
|
@ -866,7 +884,8 @@ class OrmGenerator extends GeneratorForAnnotation<Orm> {
|
|||
} else if (floatTypes.contains(ctx.columns[field.name]?.type)) {
|
||||
value = refer('double')
|
||||
.property('tryParse')
|
||||
.call([value.asA(refer('String'))]);
|
||||
.call([value.asA(refer('String'))]).ifNullThen(
|
||||
CodeExpression(Code('0.0')));
|
||||
} else {
|
||||
value = value.asA(type);
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
name: angel3_orm_generator
|
||||
version: 6.1.0
|
||||
version: 6.2.0
|
||||
description: Code generators for Angel3 ORM. Generates query builder classes.
|
||||
homepage: https://angel3-framework.web.app/
|
||||
repository: https://github.com/dukefirehawk/angel/tree/master/packages/orm/angel_orm_generator
|
||||
|
@ -30,7 +30,7 @@ dev_dependencies:
|
|||
postgres: ^2.4.0
|
||||
test: ^1.21.0
|
||||
lints: ^1.0.0
|
||||
# dependency_overrides:
|
||||
dependency_overrides:
|
||||
# angel3_container:
|
||||
# path: ../../container/angel_container
|
||||
# angel3_framework:
|
||||
|
@ -45,10 +45,10 @@ dev_dependencies:
|
|||
# path: ../../mock_request
|
||||
# angel3_serialize:
|
||||
# path: ../../serialize/angel_serialize
|
||||
# angel3_serialize_generator:
|
||||
# path: ../../serialize/angel_serialize_generator
|
||||
# angel3_orm:
|
||||
# path: ../angel_orm
|
||||
# angel3_migration:
|
||||
# path: ../angel_migration
|
||||
angel3_serialize_generator:
|
||||
path: ../../serialize/angel_serialize_generator
|
||||
angel3_orm:
|
||||
path: ../angel_orm
|
||||
angel3_migration:
|
||||
path: ../angel_migration
|
||||
|
|
@ -1,5 +1,11 @@
|
|||
# Change Log
|
||||
|
||||
## 6.0.0
|
||||
|
||||
* Fixed temporal data type
|
||||
* Fixed `MariaDbExcutor` transaction
|
||||
* Updated to `lints` 2.0.0
|
||||
|
||||
## 6.0.0-beta.3
|
||||
|
||||
* Fixed transaction for `MariaDbExecutor`
|
||||
|
|
|
@ -5,36 +5,20 @@
|
|||
[![Gitter](https://img.shields.io/gitter/room/angel_dart/discussion)](https://gitter.im/angel_dart/discussion)
|
||||
[![License](https://img.shields.io/github/license/dukefirehawk/angel)](https://github.com/dukefirehawk/angel/tree/master/packages/orm/angel_orm_mysql/LICENSE)
|
||||
|
||||
This package contains the SQL Executor required by Angel3 ORM to work with MySQL and MariaDB respectively. In order to better support the differences in MySQL and MariaDb underlying protocols, two different drives have to be used. For MariaDb 10.2.x, `mysql1` driver provides the best results, while `mysql_client` driver handles MySQL 8.x.x without issues.
|
||||
This package contains the SQL executor required by Angel3 ORM to work with MySQL and MariaDB respectively. In order to better support both MySQL and MariaDB, two different flavors of drives have been included; `mysql_client` and `mysql1`. They are implmented as `MySqlExecutor` and `MariaDbExecutor` respectively.
|
||||
|
||||
* MariaDbExecutor (beta)
|
||||
* MySqlExecutor (beta)
|
||||
## Supported databases
|
||||
|
||||
## Supported database version
|
||||
* MariaDD 10.2.x or greater
|
||||
* MySQL 8.x or greater
|
||||
|
||||
* MariaDb 10.2.x
|
||||
* MySQL 8.x
|
||||
**Note** MySQL below version 8.0 and MariaDB below version 10.2.0 are not supported as Angel3 ORM requires common table expressions (CTE) to work.
|
||||
|
||||
**Note** MySQL below version 8.0 and MariaDB below version 10.2 are not supported as Angel3 ORM requires common table expressions (CTE).
|
||||
## MySqlExecutor
|
||||
|
||||
## Connecting to MariaDB database 10.2.x
|
||||
This SQL executor is implemented using [`mysql_client`](https://pub.dev/packages?q=mysql_client) driver. It works with both `MySQL` 8.0+ and `MariaDB` 10.2+ database.
|
||||
|
||||
```dart
|
||||
import 'package:mysql1/mysql1.dart';
|
||||
|
||||
var settings = ConnectionSettings(
|
||||
host: 'localhost',
|
||||
port: 3306,
|
||||
db: 'orm_test',
|
||||
user: 'Test',
|
||||
password: 'Test123*');
|
||||
var connection = await MySqlConnection.connect(settings);
|
||||
|
||||
var logger = Logger('orm_mariadb');
|
||||
var executor = MariaDbExecutor(connection, logger: logger);
|
||||
```
|
||||
|
||||
## Connecting to MySQL database 8.x
|
||||
### Connecting to MySQL or MariaDB
|
||||
|
||||
```dart
|
||||
import 'package:mysql_client/mysql_client.dart';
|
||||
|
@ -44,16 +28,81 @@ This package contains the SQL Executor required by Angel3 ORM to work with MySQL
|
|||
port: 3306,
|
||||
databaseName: "orm_test",
|
||||
userName: "test",
|
||||
password: "Test123*",
|
||||
secure: false);
|
||||
password: "test123",
|
||||
secure: true);
|
||||
|
||||
var logger = Logger('orm_mysql');
|
||||
await connection.connect(timeoutMs: 10000);
|
||||
var executor = MySqlExecutor(connection, logger: logger);
|
||||
```
|
||||
|
||||
## Known limitation
|
||||
### Known Limitation for MySqlExecutor
|
||||
|
||||
* UTC time is not supported
|
||||
* Blob is not supported
|
||||
|
||||
* `Blob` data type mapping is not support.
|
||||
* `timestamp` data type mapping is not supported. Use `datetime` instead.
|
||||
* UTC datetime is not supported.
|
||||
|
||||
## MariaDBExecutor
|
||||
|
||||
This SQL executor is implemented using [`mysql1`](https://pub.dev/packages?q=mysql1) driver. It only works with `MariaDB` 10.2+ database. Do not use this for `MySQL` 8.0+ database.
|
||||
|
||||
### Connecting to MariaDB
|
||||
|
||||
```dart
|
||||
import 'package:mysql1/mysql1.dart';
|
||||
|
||||
var settings = ConnectionSettings(
|
||||
host: 'localhost',
|
||||
port: 3306,
|
||||
db: 'orm_test',
|
||||
user: 'test',
|
||||
password: 'test123');
|
||||
var connection = await MySqlConnection.connect(settings);
|
||||
|
||||
var logger = Logger('orm_mariadb');
|
||||
var executor = MariaDbExecutor(connection, logger: logger);
|
||||
```
|
||||
|
||||
### Known Limitation for MariaDBExecutor
|
||||
|
||||
* `Blob` type mapping is not supported.
|
||||
* `timestamp` mapping is not supported. Use `datetime` instead.
|
||||
* Only UTC datetime is supported. None UTC datetime will be automatically converted into UTC datetime.
|
||||
|
||||
## Creating a new database in MariaDB or MySQL
|
||||
|
||||
1. Login to MariaDB/MySQL database console with the following command.
|
||||
|
||||
```bash
|
||||
mysql -u root -p
|
||||
```
|
||||
|
||||
1. Run the following commands to create a new database, `orm_test` and grant both local and remote access to user, `test`. Replace `orm_test`, `test` and `test123` with your own database name, username and password respectively.
|
||||
|
||||
```mysql
|
||||
create database orm_test;
|
||||
|
||||
-- Granting localhost access only
|
||||
create user 'test'@'localhost' identified by 'test123';
|
||||
grant all privileges on orm_test.* to 'test'@'localhost';
|
||||
|
||||
-- Granting localhost and remote access
|
||||
create user 'test'@'%' identified by 'test123';
|
||||
grant all privileges on orm_test.* to 'test'@'%';
|
||||
```
|
||||
|
||||
## Compatibility Matrix
|
||||
|
||||
### MariaDB 10.2+
|
||||
|
||||
| | Create | Read | Update | Delete |
|
||||
|-----------------|--------|--------|--------|--------|
|
||||
| MySqlExecutor | Y | Y | Y | Y |
|
||||
| MariaDBExecutor | Y | Y | Y | Y |
|
||||
|
||||
### MySQL 8.0+
|
||||
|
||||
| | Create | Read | Update | Delete |
|
||||
|-----------------|--------|--------|--------|--------|
|
||||
| MySqlExecutor | Y | Y | Y | Y |
|
||||
| MariaDBExecutor | N | N | N | N |
|
||||
|
|
|
@ -28,8 +28,8 @@ Future<void> mariaDBExample() async {
|
|||
host: 'localhost',
|
||||
port: 3306,
|
||||
db: 'orm_test',
|
||||
user: 'Test',
|
||||
password: 'Test123*');
|
||||
user: 'test',
|
||||
password: 'test123');
|
||||
var connection = await MySqlConnection.connect(settings);
|
||||
|
||||
print("Connected to MariaDb");
|
||||
|
@ -61,8 +61,8 @@ Future<void> mysqlExample() async {
|
|||
port: 3306,
|
||||
databaseName: "orm_test",
|
||||
userName: "test",
|
||||
password: "Test123*",
|
||||
secure: false);
|
||||
password: "test123",
|
||||
secure: true);
|
||||
|
||||
print("Connected to MySQL");
|
||||
var logger = Logger('orm_mysql');
|
||||
|
|
|
@ -9,6 +9,8 @@ class MariaDbExecutor extends QueryExecutor {
|
|||
|
||||
final MySqlConnection _connection;
|
||||
|
||||
TransactionContext? _transactionContext;
|
||||
|
||||
MariaDbExecutor(this._connection, {Logger? logger}) {
|
||||
this.logger = logger ?? Logger('MariaDbExecutor');
|
||||
}
|
||||
|
@ -33,10 +35,19 @@ class MariaDbExecutor extends QueryExecutor {
|
|||
}
|
||||
|
||||
var params = substitutionValues.values.toList();
|
||||
for (var i = 0; i < params.length; i++) {
|
||||
var v = params[i];
|
||||
|
||||
//logger?.warning('Query: $query');
|
||||
//logger?.warning('Values: $params');
|
||||
//logger?.warning('Returning Query: $returningQuery');
|
||||
if (v is DateTime) {
|
||||
if (!v.isUtc) {
|
||||
params[i] = v.toUtc();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.warning('Query: $query');
|
||||
logger.warning('Values: $params');
|
||||
logger.warning('Returning Query: $returningQuery');
|
||||
|
||||
if (returningQuery.isNotEmpty) {
|
||||
// Handle insert, update and delete
|
||||
|
@ -66,11 +77,12 @@ class MariaDbExecutor extends QueryExecutor {
|
|||
@override
|
||||
Future<T> transaction<T>(FutureOr<T> Function(QueryExecutor) f) async {
|
||||
T? returnValue = await _connection.transaction((ctx) async {
|
||||
var conn = ctx as MySqlConnection;
|
||||
// TODO: To be relooked at
|
||||
try {
|
||||
logger.fine('Entering transaction');
|
||||
var tx = MariaDbExecutor(conn, logger: logger);
|
||||
return await f(tx);
|
||||
//var tx = MariaDbExecutor(conn, logger: logger);
|
||||
_transactionContext = ctx;
|
||||
return await f(this);
|
||||
} catch (e) {
|
||||
logger.severe('Failed to run transaction', e);
|
||||
rethrow;
|
||||
|
|
|
@ -131,7 +131,10 @@ class MySqlExecutor extends QueryExecutor {
|
|||
|
||||
// Handle select
|
||||
return _connection.execute(query, substitutionValues).then((results) {
|
||||
logger.warning("SELECT");
|
||||
var tmpData = results.rows;
|
||||
for (var element in tmpData) {
|
||||
logger.warning("[Result] : ${element.assoc()}");
|
||||
}
|
||||
|
||||
return results.rows.map((r) => r.typedAssoc().values.toList()).toList();
|
||||
});
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
name: angel3_orm_mysql
|
||||
version: 6.0.0-beta.3
|
||||
version: 6.0.0
|
||||
description: MySQL support for Angel3 ORM. Includes functionality for querying and transactions.
|
||||
homepage: https://angel3-framework.web.app/
|
||||
repository: https://github.com/dukefirehawk/angel/tree/master/packages/orm/angel_orm_mysql
|
||||
|
@ -8,8 +8,8 @@ environment:
|
|||
dependencies:
|
||||
angel3_orm: ^6.0.0
|
||||
logging: ^1.0.0
|
||||
mysql1: ^0.19.0
|
||||
mysql_client: ^0.0.15
|
||||
mysql1: ^0.20.0
|
||||
mysql_client: ^0.0.24
|
||||
optional: ^6.0.0
|
||||
dev_dependencies:
|
||||
angel3_orm_generator: ^6.0.0
|
||||
|
@ -17,18 +17,18 @@ dev_dependencies:
|
|||
build_runner: ^2.0.1
|
||||
test: ^1.21.0
|
||||
lints: ^1.0.0
|
||||
# dependency_overrides:
|
||||
dependency_overrides:
|
||||
# angel3_serialize:
|
||||
# path: ../../serialize/angel_serialize
|
||||
# angel3_serialize_generator:
|
||||
# path: ../../serialize/angel_serialize_generator
|
||||
# angel3_model:
|
||||
# path: ../../model
|
||||
# angel3_orm_test:
|
||||
# path: ../angel_orm_test
|
||||
# angel3_orm:
|
||||
# path: ../angel_orm
|
||||
angel3_orm_test:
|
||||
path: ../angel_orm_test
|
||||
angel3_orm:
|
||||
path: ../angel_orm
|
||||
# angel3_orm_generator:
|
||||
# path: ../angel_orm_generator
|
||||
# angel3_migration:
|
||||
# path: ../angel_migration
|
||||
angel3_migration:
|
||||
path: ../angel_migration
|
||||
|
|
|
@ -13,26 +13,29 @@ FutureOr<QueryExecutor> Function() createTables(List<String> schemas) {
|
|||
return () => _connectToMySql(schemas);
|
||||
|
||||
// For MariaDB
|
||||
// return () => _connectToMariaDb(tables);
|
||||
//return () => _connectToMariaDb(schemas);
|
||||
}
|
||||
|
||||
// For MySQL
|
||||
Future<void> dropTables(QueryExecutor executor) async {
|
||||
var sqlExecutor = (executor as MySqlExecutor);
|
||||
for (var tableName in tmpTables.reversed) {
|
||||
await sqlExecutor.rawConnection.execute('drop table $tableName;');
|
||||
print('DROP TABLE $tableName');
|
||||
await sqlExecutor.rawConnection.execute('DROP TABLE $tableName;');
|
||||
}
|
||||
|
||||
return sqlExecutor.close();
|
||||
}
|
||||
|
||||
// For MariaDB
|
||||
/* Future<void> dropTables(QueryExecutor executor) {
|
||||
Future<void> dropTables2(QueryExecutor executor) {
|
||||
var sqlExecutor = (executor as MariaDbExecutor);
|
||||
for (var tableName in tmpTables.reversed) {
|
||||
print('DROP TABLE $tableName');
|
||||
sqlExecutor.query(tableName, 'DROP TABLE $tableName', {});
|
||||
}
|
||||
return sqlExecutor.close();
|
||||
} */
|
||||
}
|
||||
|
||||
String extractTableName(String createQuery) {
|
||||
var start = createQuery.indexOf('EXISTS');
|
||||
|
@ -51,8 +54,8 @@ Future<MariaDbExecutor> _connectToMariaDb(List<String> schemas) async {
|
|||
host: 'localhost',
|
||||
port: 3306,
|
||||
db: 'orm_test',
|
||||
user: 'Test',
|
||||
password: 'Test123*');
|
||||
user: 'test',
|
||||
password: 'test123');
|
||||
var connection = await MySqlConnection.connect(settings);
|
||||
|
||||
var logger = Logger('orm_mariadb');
|
||||
|
@ -64,7 +67,7 @@ Future<MariaDbExecutor> _connectToMariaDb(List<String> schemas) async {
|
|||
var data = await File('test/migrations/$s.sql').readAsString();
|
||||
var queries = data.split(";");
|
||||
for (var q in queries) {
|
||||
//print("Table: [$q]");
|
||||
print("Table: [$q]");
|
||||
if (q.trim().isNotEmpty) {
|
||||
//await connection.execute(q);
|
||||
await connection.query(q);
|
||||
|
@ -87,8 +90,8 @@ Future<MySqlExecutor> _connectToMySql(List<String> schemas) async {
|
|||
port: 3306,
|
||||
host: "localhost",
|
||||
userName: Platform.environment['MYSQL_USERNAME'] ?? 'test',
|
||||
password: Platform.environment['MYSQL_PASSWORD'] ?? 'Test123*',
|
||||
secure: false);
|
||||
password: Platform.environment['MYSQL_PASSWORD'] ?? 'test123',
|
||||
secure: true);
|
||||
|
||||
await connection.connect(timeoutMs: 10000);
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
CREATE TABLE IF NOT EXISTS authors (
|
||||
id serial PRIMARY KEY,
|
||||
name varchar(255) UNIQUE NOT NULL,
|
||||
created_at timestamp,
|
||||
updated_at timestamp
|
||||
created_at datetime,
|
||||
updated_at datetime
|
||||
);
|
|
@ -3,6 +3,6 @@ CREATE TABLE IF NOT EXISTS books (
|
|||
author_id int NOT NULL,
|
||||
partner_author_id int,
|
||||
name varchar(255),
|
||||
created_at timestamp,
|
||||
updated_at timestamp
|
||||
created_at datetime,
|
||||
updated_at datetime
|
||||
);
|
|
@ -3,7 +3,7 @@ CREATE TABLE IF NOT EXISTS cars (
|
|||
make varchar(255) NOT NULL,
|
||||
description TEXT NOT NULL,
|
||||
family_friendly BOOLEAN NOT NULL,
|
||||
recalled_at timestamp,
|
||||
created_at timestamp,
|
||||
updated_at timestamp
|
||||
recalled_at datetime,
|
||||
created_at datetime,
|
||||
updated_at datetime
|
||||
);
|
|
@ -2,6 +2,6 @@ CREATE TABLE IF NOT EXISTS feet (
|
|||
id serial PRIMARY KEY,
|
||||
leg_id int NOT NULL,
|
||||
n_toes int NOT NULL,
|
||||
created_at timestamp,
|
||||
updated_at timestamp
|
||||
created_at datetime,
|
||||
updated_at datetime
|
||||
);
|
|
@ -2,7 +2,7 @@ CREATE TABLE IF NOT EXISTS fruits (
|
|||
id serial,
|
||||
tree_id int,
|
||||
common_name varchar(255),
|
||||
created_at timestamp,
|
||||
updated_at timestamp,
|
||||
created_at datetime,
|
||||
updated_at datetime,
|
||||
PRIMARY KEY(id)
|
||||
);
|
|
@ -1,6 +1,6 @@
|
|||
CREATE TABLE IF NOT EXISTS has_cars (
|
||||
id serial PRIMARY KEY,
|
||||
type int not null,
|
||||
created_at timestamp,
|
||||
updated_at timestamp
|
||||
created_at datetime,
|
||||
updated_at datetime
|
||||
);
|
|
@ -2,6 +2,6 @@ CREATE TABLE IF NOT EXISTS has_maps (
|
|||
id serial PRIMARY KEY,
|
||||
value jsonb not null,
|
||||
list jsonb not null,
|
||||
created_at timestamp,
|
||||
updated_at timestamp
|
||||
created_at datetime,
|
||||
updated_at datetime
|
||||
);
|
|
@ -1,6 +1,6 @@
|
|||
CREATE TABLE IF NOT EXISTS legs (
|
||||
id serial PRIMARY KEY,
|
||||
name varchar(255) NOT NULL,
|
||||
created_at timestamp,
|
||||
updated_at timestamp
|
||||
created_at datetime,
|
||||
updated_at datetime
|
||||
);
|
|
@ -1,7 +1,7 @@
|
|||
CREATE TABLE IF NOT EXISTS numbas (
|
||||
i int NOT NULL UNIQUE,
|
||||
parent int,
|
||||
created_at TIMESTAMP,
|
||||
updated_at TIMESTAMP,
|
||||
created_at datetime,
|
||||
updated_at datetime,
|
||||
PRIMARY KEY(i)
|
||||
);
|
|
@ -1,6 +1,6 @@
|
|||
CREATE TABLE IF NOT EXISTS roles (
|
||||
id serial PRIMARY KEY,
|
||||
name varchar(255),
|
||||
created_at timestamp,
|
||||
updated_at timestamp
|
||||
created_at datetime,
|
||||
updated_at datetime
|
||||
);
|
|
@ -2,7 +2,7 @@ CREATE TABLE IF NOT EXISTS songs (
|
|||
id serial,
|
||||
weird_join_id int,
|
||||
title varchar(255),
|
||||
created_at TIMESTAMP,
|
||||
updated_at TIMESTAMP,
|
||||
created_at datetime,
|
||||
updated_at datetime,
|
||||
PRIMARY KEY(id)
|
||||
);
|
|
@ -1,7 +1,7 @@
|
|||
CREATE TABLE IF NOT EXISTS trees (
|
||||
id serial,
|
||||
rings smallint UNIQUE,
|
||||
created_at timestamp,
|
||||
updated_at timestamp,
|
||||
created_at datetime,
|
||||
updated_at datetime,
|
||||
PRIMARY KEY(id)
|
||||
);
|
|
@ -3,6 +3,6 @@ CREATE TABLE IF NOT EXISTS users (
|
|||
username varchar(255),
|
||||
password varchar(255),
|
||||
email varchar(255),
|
||||
created_at timestamp,
|
||||
updated_at timestamp
|
||||
created_at datetime,
|
||||
updated_at datetime
|
||||
);
|
|
@ -2,6 +2,6 @@ CREATE TABLE IF NOT EXISTS role_users (
|
|||
id serial PRIMARY KEY,
|
||||
user_id int NOT NULL,
|
||||
role_id int NOT NULL,
|
||||
created_at timestamp,
|
||||
updated_at timestamp
|
||||
created_at datetime,
|
||||
updated_at datetime
|
||||
);
|
|
@ -14,7 +14,7 @@ void main() async {
|
|||
// if (rec.stackTrace != null) print(rec.stackTrace);
|
||||
//});
|
||||
|
||||
belongsToTests(createTables(['author', 'book']), close: dropTables);
|
||||
//belongsToTests(createTables(['author', 'book']), close: dropTables);
|
||||
|
||||
//hasOneTests(my(['leg', 'foot']), close: closeMy);
|
||||
//standaloneTests(my(['car']), close: closeMy);
|
||||
|
|
|
@ -1,5 +1,9 @@
|
|||
# Change Log
|
||||
|
||||
## 6.1.0
|
||||
|
||||
* Fixed #71. Restablish broken connection automatically.
|
||||
|
||||
## 6.0.0
|
||||
|
||||
* Updated to SDK 2.16.x
|
||||
|
|
|
@ -9,6 +9,6 @@ PostgreSQL support for Angel3 ORM.
|
|||
|
||||
## Supported database
|
||||
|
||||
* PostgreSQL version 10 or later
|
||||
* PostgreSQL version 10 or greater
|
||||
|
||||
For documentation about the ORM, see [Developer Guide](https://angel3-docs.dukefirehawk.com/guides/orm)
|
||||
|
|
|
@ -7,7 +7,7 @@ import 'package:postgres/postgres.dart';
|
|||
|
||||
/// A [QueryExecutor] that queries a PostgreSQL database.
|
||||
class PostgreSqlExecutor extends QueryExecutor {
|
||||
final PostgreSQLExecutionContext _connection;
|
||||
PostgreSQLExecutionContext _connection;
|
||||
|
||||
/// An optional [Logger] to print information to. A default logger will be used
|
||||
/// if not set
|
||||
|
@ -57,7 +57,43 @@ class PostgreSqlExecutor extends QueryExecutor {
|
|||
}
|
||||
});
|
||||
|
||||
return _connection.query(query, substitutionValues: param);
|
||||
return _connection
|
||||
.query(query, substitutionValues: param)
|
||||
.catchError((err) async {
|
||||
logger.warning(err);
|
||||
if (err is PostgreSQLException) {
|
||||
// This is a hack to detect broken db connection
|
||||
bool brokenConnection =
|
||||
err.message?.contains("connection is not open") ?? false;
|
||||
if (brokenConnection) {
|
||||
if (_connection is PostgreSQLConnection) {
|
||||
// Open a new db connection
|
||||
var currentConnection = _connection as PostgreSQLConnection;
|
||||
currentConnection.close();
|
||||
|
||||
logger.warning(
|
||||
"A broken database connection is detected. Creating a new database connection.");
|
||||
var conn = _createNewConnection(currentConnection);
|
||||
await conn.open();
|
||||
_connection = conn;
|
||||
|
||||
// Retry the query with the new db connection
|
||||
return _connection.query(query, substitutionValues: param);
|
||||
}
|
||||
}
|
||||
}
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
// Create a new database connection from an existing connection
|
||||
PostgreSQLConnection _createNewConnection(PostgreSQLConnection conn) {
|
||||
return PostgreSQLConnection(conn.host, conn.port, conn.databaseName,
|
||||
username: conn.username,
|
||||
password: conn.password,
|
||||
useSSL: conn.useSSL,
|
||||
timeZone: conn.timeZone,
|
||||
timeoutInSeconds: conn.timeoutInSeconds);
|
||||
}
|
||||
|
||||
@override
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
name: angel3_orm_postgres
|
||||
version: 6.0.0
|
||||
version: 6.1.0
|
||||
description: PostgreSQL support for Angel3 ORM. Includes functionality for querying and transactions.
|
||||
homepage: https://angel3-framework.web.app/
|
||||
repository: https://github.com/dukefirehawk/angel/tree/master/packages/orm/angel_orm_postgres
|
||||
|
@ -17,13 +17,13 @@ dev_dependencies:
|
|||
test: ^1.21.0
|
||||
lints: ^1.0.0
|
||||
dependency_overrides:
|
||||
# angel3_serialize:
|
||||
# path: ../../serialize/angel_serialize
|
||||
angel3_serialize:
|
||||
path: ../../serialize/angel_serialize
|
||||
# angel3_model:
|
||||
# path: ../../model
|
||||
angel3_orm_test:
|
||||
angel3_orm_test:
|
||||
path: ../angel_orm_test
|
||||
angel3_orm:
|
||||
angel3_orm:
|
||||
path: ../angel_orm
|
||||
# angel3_migration:
|
||||
# path: ../angel_migration
|
||||
angel3_migration:
|
||||
path: ../angel_migration
|
||||
|
|
|
@ -2,7 +2,7 @@ CREATE TEMPORARY TABLE "person_orders" (
|
|||
"id" serial PRIMARY KEY,
|
||||
"person_id" int not null,
|
||||
"name" varchar(255),
|
||||
"price" int,
|
||||
"price" float,
|
||||
"deleted" bool not null default false,
|
||||
"created_at" timestamp,
|
||||
"updated_at" timestamp
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
# Change Log
|
||||
|
||||
## 6.0.1
|
||||
## 6.1.0
|
||||
|
||||
* Updated generated test cases
|
||||
* Added test cases for non nullable type
|
||||
|
||||
## 6.0.0
|
||||
|
||||
|
|
|
@ -5,4 +5,10 @@
|
|||
[![Gitter](https://img.shields.io/gitter/room/angel_dart/discussion)](https://gitter.im/angel_dart/discussion)
|
||||
[![License](https://img.shields.io/github/license/dukefirehawk/angel)](https://github.com/dukefirehawk/angel/tree/master/packages/orm/angel_orm_test/LICENSE)
|
||||
|
||||
Common tests for Angel3 ORM. Reference implmentation of generated ORM files.
|
||||
Common test cases for Angel3 ORM. Reference implmentation of generated ORM files.
|
||||
|
||||
## Supported databases
|
||||
|
||||
* MariaDb 10.2.x or greater
|
||||
* MySQL 8.x or greater
|
||||
* PostreSQL 10.x or greater
|
||||
|
|
26
packages/orm/angel_orm_test/lib/src/models/asset.dart
Normal file
26
packages/orm/angel_orm_test/lib/src/models/asset.dart
Normal file
|
@ -0,0 +1,26 @@
|
|||
import 'package:angel3_orm/angel3_orm.dart';
|
||||
import 'package:angel3_serialize/angel3_serialize.dart';
|
||||
import 'package:angel3_migration/angel3_migration.dart';
|
||||
import 'package:optional/optional.dart';
|
||||
|
||||
part 'asset.g.dart';
|
||||
|
||||
@serializable
|
||||
@orm
|
||||
abstract class _Item extends Model {
|
||||
String get description;
|
||||
}
|
||||
|
||||
@serializable
|
||||
@orm
|
||||
abstract class _Asset extends Model {
|
||||
String get description;
|
||||
|
||||
String get name;
|
||||
|
||||
@Column(type: ColumnType.numeric, precision: 17, scale: 3)
|
||||
double get price;
|
||||
|
||||
@hasMany
|
||||
List<_Item> get items;
|
||||
}
|
697
packages/orm/angel_orm_test/lib/src/models/asset.g.dart
Normal file
697
packages/orm/angel_orm_test/lib/src/models/asset.g.dart
Normal file
|
@ -0,0 +1,697 @@
|
|||
// GENERATED CODE - DO NOT MODIFY BY HAND
|
||||
|
||||
part of 'asset.dart';
|
||||
|
||||
// **************************************************************************
|
||||
// MigrationGenerator
|
||||
// **************************************************************************
|
||||
|
||||
class ItemMigration extends Migration {
|
||||
@override
|
||||
void up(Schema schema) {
|
||||
schema.create('items', (table) {
|
||||
table.serial('id').primaryKey();
|
||||
table.timeStamp('created_at');
|
||||
table.timeStamp('updated_at');
|
||||
table.varChar('description', length: 255);
|
||||
});
|
||||
}
|
||||
|
||||
@override
|
||||
void down(Schema schema) {
|
||||
schema.drop('items');
|
||||
}
|
||||
}
|
||||
|
||||
class AssetMigration extends Migration {
|
||||
@override
|
||||
void up(Schema schema) {
|
||||
schema.create('assets', (table) {
|
||||
table.serial('id').primaryKey();
|
||||
table.timeStamp('created_at');
|
||||
table.timeStamp('updated_at');
|
||||
table.varChar('description', length: 255);
|
||||
table.varChar('name', length: 255);
|
||||
table.double('price');
|
||||
});
|
||||
}
|
||||
|
||||
@override
|
||||
void down(Schema schema) {
|
||||
schema.drop('assets', cascade: true);
|
||||
}
|
||||
}
|
||||
|
||||
// **************************************************************************
|
||||
// OrmGenerator
|
||||
// **************************************************************************
|
||||
|
||||
class ItemQuery extends Query<Item, ItemQueryWhere> {
|
||||
ItemQuery({Query? parent, Set<String>? trampoline}) : super(parent: parent) {
|
||||
trampoline ??= <String>{};
|
||||
trampoline.add(tableName);
|
||||
_where = ItemQueryWhere(this);
|
||||
}
|
||||
|
||||
@override
|
||||
final ItemQueryValues values = ItemQueryValues();
|
||||
|
||||
List<String> _selectedFields = [];
|
||||
|
||||
ItemQueryWhere? _where;
|
||||
|
||||
@override
|
||||
Map<String, String> get casts {
|
||||
return {};
|
||||
}
|
||||
|
||||
@override
|
||||
String get tableName {
|
||||
return 'items';
|
||||
}
|
||||
|
||||
@override
|
||||
List<String> get fields {
|
||||
const _fields = ['id', 'created_at', 'updated_at', 'description'];
|
||||
return _selectedFields.isEmpty
|
||||
? _fields
|
||||
: _fields.where((field) => _selectedFields.contains(field)).toList();
|
||||
}
|
||||
|
||||
ItemQuery select(List<String> selectedFields) {
|
||||
_selectedFields = selectedFields;
|
||||
return this;
|
||||
}
|
||||
|
||||
@override
|
||||
ItemQueryWhere? get where {
|
||||
return _where;
|
||||
}
|
||||
|
||||
@override
|
||||
ItemQueryWhere newWhereClause() {
|
||||
return ItemQueryWhere(this);
|
||||
}
|
||||
|
||||
Optional<Item> parseRow(List row) {
|
||||
if (row.every((x) => x == null)) {
|
||||
return Optional.empty();
|
||||
}
|
||||
var model = Item(
|
||||
id: fields.contains('id') ? row[0].toString() : null,
|
||||
createdAt: fields.contains('created_at')
|
||||
? mapToNullableDateTime(row[1])
|
||||
: null,
|
||||
updatedAt: fields.contains('updated_at')
|
||||
? mapToNullableDateTime(row[2])
|
||||
: null,
|
||||
description: fields.contains('description') ? (row[3] as String) : '');
|
||||
return Optional.of(model);
|
||||
}
|
||||
|
||||
@override
|
||||
Optional<Item> deserialize(List row) {
|
||||
return parseRow(row);
|
||||
}
|
||||
}
|
||||
|
||||
class ItemQueryWhere extends QueryWhere {
|
||||
ItemQueryWhere(ItemQuery query)
|
||||
: id = NumericSqlExpressionBuilder<int>(query, 'id'),
|
||||
createdAt = DateTimeSqlExpressionBuilder(query, 'created_at'),
|
||||
updatedAt = DateTimeSqlExpressionBuilder(query, 'updated_at'),
|
||||
description = StringSqlExpressionBuilder(query, 'description');
|
||||
|
||||
final NumericSqlExpressionBuilder<int> id;
|
||||
|
||||
final DateTimeSqlExpressionBuilder createdAt;
|
||||
|
||||
final DateTimeSqlExpressionBuilder updatedAt;
|
||||
|
||||
final StringSqlExpressionBuilder description;
|
||||
|
||||
@override
|
||||
List<SqlExpressionBuilder> get expressionBuilders {
|
||||
return [id, createdAt, updatedAt, description];
|
||||
}
|
||||
}
|
||||
|
||||
class ItemQueryValues extends MapQueryValues {
|
||||
@override
|
||||
Map<String, String> get casts {
|
||||
return {};
|
||||
}
|
||||
|
||||
String? get id {
|
||||
return (values['id'] as String?);
|
||||
}
|
||||
|
||||
set id(String? value) => values['id'] = value;
|
||||
DateTime? get createdAt {
|
||||
return (values['created_at'] as DateTime?);
|
||||
}
|
||||
|
||||
set createdAt(DateTime? value) => values['created_at'] = value;
|
||||
DateTime? get updatedAt {
|
||||
return (values['updated_at'] as DateTime?);
|
||||
}
|
||||
|
||||
set updatedAt(DateTime? value) => values['updated_at'] = value;
|
||||
String get description {
|
||||
return (values['description'] as String);
|
||||
}
|
||||
|
||||
set description(String value) => values['description'] = value;
|
||||
void copyFrom(Item model) {
|
||||
createdAt = model.createdAt;
|
||||
updatedAt = model.updatedAt;
|
||||
description = model.description;
|
||||
}
|
||||
}
|
||||
|
||||
class AssetQuery extends Query<Asset, AssetQueryWhere> {
|
||||
AssetQuery({Query? parent, Set<String>? trampoline}) : super(parent: parent) {
|
||||
trampoline ??= <String>{};
|
||||
trampoline.add(tableName);
|
||||
_where = AssetQueryWhere(this);
|
||||
leftJoin(_items = ItemQuery(trampoline: trampoline, parent: this), 'id',
|
||||
'asset_id',
|
||||
additionalFields: const [
|
||||
'id',
|
||||
'created_at',
|
||||
'updated_at',
|
||||
'description'
|
||||
],
|
||||
trampoline: trampoline);
|
||||
}
|
||||
|
||||
@override
|
||||
final AssetQueryValues values = AssetQueryValues();
|
||||
|
||||
List<String> _selectedFields = [];
|
||||
|
||||
AssetQueryWhere? _where;
|
||||
|
||||
late ItemQuery _items;
|
||||
|
||||
@override
|
||||
Map<String, String> get casts {
|
||||
return {'price': 'char'};
|
||||
}
|
||||
|
||||
@override
|
||||
String get tableName {
|
||||
return 'assets';
|
||||
}
|
||||
|
||||
@override
|
||||
List<String> get fields {
|
||||
const _fields = [
|
||||
'id',
|
||||
'created_at',
|
||||
'updated_at',
|
||||
'description',
|
||||
'name',
|
||||
'price'
|
||||
];
|
||||
return _selectedFields.isEmpty
|
||||
? _fields
|
||||
: _fields.where((field) => _selectedFields.contains(field)).toList();
|
||||
}
|
||||
|
||||
AssetQuery select(List<String> selectedFields) {
|
||||
_selectedFields = selectedFields;
|
||||
return this;
|
||||
}
|
||||
|
||||
@override
|
||||
AssetQueryWhere? get where {
|
||||
return _where;
|
||||
}
|
||||
|
||||
@override
|
||||
AssetQueryWhere newWhereClause() {
|
||||
return AssetQueryWhere(this);
|
||||
}
|
||||
|
||||
Optional<Asset> parseRow(List row) {
|
||||
if (row.every((x) => x == null)) {
|
||||
return Optional.empty();
|
||||
}
|
||||
var model = Asset(
|
||||
id: fields.contains('id') ? row[0].toString() : null,
|
||||
createdAt: fields.contains('created_at')
|
||||
? mapToNullableDateTime(row[1])
|
||||
: null,
|
||||
updatedAt: fields.contains('updated_at')
|
||||
? mapToNullableDateTime(row[2])
|
||||
: null,
|
||||
description: fields.contains('description') ? (row[3] as String) : '',
|
||||
name: fields.contains('name') ? (row[4] as String) : '',
|
||||
price: fields.contains('price') ? mapToDouble(row[5]) : 0.0);
|
||||
if (row.length > 6) {
|
||||
var modelOpt = ItemQuery().parseRow(row.skip(6).take(4).toList());
|
||||
modelOpt.ifPresent((m) {
|
||||
model = model.copyWith(items: [m]);
|
||||
});
|
||||
}
|
||||
return Optional.of(model);
|
||||
}
|
||||
|
||||
@override
|
||||
Optional<Asset> deserialize(List row) {
|
||||
return parseRow(row);
|
||||
}
|
||||
|
||||
ItemQuery get items {
|
||||
return _items;
|
||||
}
|
||||
|
||||
@override
|
||||
Future<List<Asset>> get(QueryExecutor executor) {
|
||||
return super.get(executor).then((result) {
|
||||
return result.fold<List<Asset>>([], (out, model) {
|
||||
var idx = out.indexWhere((m) => m.id == model.id);
|
||||
|
||||
if (idx == -1) {
|
||||
return out..add(model);
|
||||
} else {
|
||||
var l = out[idx];
|
||||
return out
|
||||
..[idx] = l.copyWith(
|
||||
items: List<_Item>.from(l.items)..addAll(model.items));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@override
|
||||
Future<List<Asset>> update(QueryExecutor executor) {
|
||||
return super.update(executor).then((result) {
|
||||
return result.fold<List<Asset>>([], (out, model) {
|
||||
var idx = out.indexWhere((m) => m.id == model.id);
|
||||
|
||||
if (idx == -1) {
|
||||
return out..add(model);
|
||||
} else {
|
||||
var l = out[idx];
|
||||
return out
|
||||
..[idx] = l.copyWith(
|
||||
items: List<_Item>.from(l.items)..addAll(model.items));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@override
|
||||
Future<List<Asset>> delete(QueryExecutor executor) {
|
||||
return super.delete(executor).then((result) {
|
||||
return result.fold<List<Asset>>([], (out, model) {
|
||||
var idx = out.indexWhere((m) => m.id == model.id);
|
||||
|
||||
if (idx == -1) {
|
||||
return out..add(model);
|
||||
} else {
|
||||
var l = out[idx];
|
||||
return out
|
||||
..[idx] = l.copyWith(
|
||||
items: List<_Item>.from(l.items)..addAll(model.items));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
class AssetQueryWhere extends QueryWhere {
|
||||
AssetQueryWhere(AssetQuery query)
|
||||
: id = NumericSqlExpressionBuilder<int>(query, 'id'),
|
||||
createdAt = DateTimeSqlExpressionBuilder(query, 'created_at'),
|
||||
updatedAt = DateTimeSqlExpressionBuilder(query, 'updated_at'),
|
||||
description = StringSqlExpressionBuilder(query, 'description'),
|
||||
name = StringSqlExpressionBuilder(query, 'name'),
|
||||
price = NumericSqlExpressionBuilder<double>(query, 'price');
|
||||
|
||||
final NumericSqlExpressionBuilder<int> id;
|
||||
|
||||
final DateTimeSqlExpressionBuilder createdAt;
|
||||
|
||||
final DateTimeSqlExpressionBuilder updatedAt;
|
||||
|
||||
final StringSqlExpressionBuilder description;
|
||||
|
||||
final StringSqlExpressionBuilder name;
|
||||
|
||||
final NumericSqlExpressionBuilder<double> price;
|
||||
|
||||
@override
|
||||
List<SqlExpressionBuilder> get expressionBuilders {
|
||||
return [id, createdAt, updatedAt, description, name, price];
|
||||
}
|
||||
}
|
||||
|
||||
class AssetQueryValues extends MapQueryValues {
|
||||
@override
|
||||
Map<String, String> get casts {
|
||||
return {'price': 'double precision'};
|
||||
}
|
||||
|
||||
String? get id {
|
||||
return (values['id'] as String?);
|
||||
}
|
||||
|
||||
set id(String? value) => values['id'] = value;
|
||||
DateTime? get createdAt {
|
||||
return (values['created_at'] as DateTime?);
|
||||
}
|
||||
|
||||
set createdAt(DateTime? value) => values['created_at'] = value;
|
||||
DateTime? get updatedAt {
|
||||
return (values['updated_at'] as DateTime?);
|
||||
}
|
||||
|
||||
set updatedAt(DateTime? value) => values['updated_at'] = value;
|
||||
String get description {
|
||||
return (values['description'] as String);
|
||||
}
|
||||
|
||||
set description(String value) => values['description'] = value;
|
||||
String get name {
|
||||
return (values['name'] as String);
|
||||
}
|
||||
|
||||
set name(String value) => values['name'] = value;
|
||||
double get price {
|
||||
return double.tryParse((values['price'] as String)) ?? 0.0;
|
||||
}
|
||||
|
||||
set price(double value) => values['price'] = value.toString();
|
||||
void copyFrom(Asset model) {
|
||||
createdAt = model.createdAt;
|
||||
updatedAt = model.updatedAt;
|
||||
description = model.description;
|
||||
name = model.name;
|
||||
price = model.price;
|
||||
}
|
||||
}
|
||||
|
||||
// **************************************************************************
|
||||
// JsonModelGenerator
|
||||
// **************************************************************************
|
||||
|
||||
@generatedSerializable
|
||||
class Item extends _Item {
|
||||
Item({this.id, this.createdAt, this.updatedAt, required this.description});
|
||||
|
||||
/// A unique identifier corresponding to this item.
|
||||
@override
|
||||
String? id;
|
||||
|
||||
/// The time at which this item was created.
|
||||
@override
|
||||
DateTime? createdAt;
|
||||
|
||||
/// The last time at which this item was updated.
|
||||
@override
|
||||
DateTime? updatedAt;
|
||||
|
||||
@override
|
||||
String description;
|
||||
|
||||
Item copyWith(
|
||||
{String? id,
|
||||
DateTime? createdAt,
|
||||
DateTime? updatedAt,
|
||||
String? description}) {
|
||||
return Item(
|
||||
id: id ?? this.id,
|
||||
createdAt: createdAt ?? this.createdAt,
|
||||
updatedAt: updatedAt ?? this.updatedAt,
|
||||
description: description ?? this.description);
|
||||
}
|
||||
|
||||
@override
|
||||
bool operator ==(other) {
|
||||
return other is _Item &&
|
||||
other.id == id &&
|
||||
other.createdAt == createdAt &&
|
||||
other.updatedAt == updatedAt &&
|
||||
other.description == description;
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode {
|
||||
return hashObjects([id, createdAt, updatedAt, description]);
|
||||
}
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return 'Item(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, description=$description)';
|
||||
}
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
return ItemSerializer.toMap(this);
|
||||
}
|
||||
}
|
||||
|
||||
@generatedSerializable
|
||||
class Asset extends _Asset {
|
||||
Asset(
|
||||
{this.id,
|
||||
this.createdAt,
|
||||
this.updatedAt,
|
||||
required this.description,
|
||||
required this.name,
|
||||
required this.price,
|
||||
List<_Item> items = const []})
|
||||
: items = List.unmodifiable(items);
|
||||
|
||||
/// A unique identifier corresponding to this item.
|
||||
@override
|
||||
String? id;
|
||||
|
||||
/// The time at which this item was created.
|
||||
@override
|
||||
DateTime? createdAt;
|
||||
|
||||
/// The last time at which this item was updated.
|
||||
@override
|
||||
DateTime? updatedAt;
|
||||
|
||||
@override
|
||||
String description;
|
||||
|
||||
@override
|
||||
String name;
|
||||
|
||||
@override
|
||||
double price;
|
||||
|
||||
@override
|
||||
List<_Item> items;
|
||||
|
||||
Asset copyWith(
|
||||
{String? id,
|
||||
DateTime? createdAt,
|
||||
DateTime? updatedAt,
|
||||
String? description,
|
||||
String? name,
|
||||
double? price,
|
||||
List<_Item>? items}) {
|
||||
return Asset(
|
||||
id: id ?? this.id,
|
||||
createdAt: createdAt ?? this.createdAt,
|
||||
updatedAt: updatedAt ?? this.updatedAt,
|
||||
description: description ?? this.description,
|
||||
name: name ?? this.name,
|
||||
price: price ?? this.price,
|
||||
items: items ?? this.items);
|
||||
}
|
||||
|
||||
@override
|
||||
bool operator ==(other) {
|
||||
return other is _Asset &&
|
||||
other.id == id &&
|
||||
other.createdAt == createdAt &&
|
||||
other.updatedAt == updatedAt &&
|
||||
other.description == description &&
|
||||
other.name == name &&
|
||||
other.price == price &&
|
||||
ListEquality<_Item>(DefaultEquality<_Item>())
|
||||
.equals(other.items, items);
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode {
|
||||
return hashObjects(
|
||||
[id, createdAt, updatedAt, description, name, price, items]);
|
||||
}
|
||||
|
||||
@override
|
||||
String toString() {
|
||||
return 'Asset(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, description=$description, name=$name, price=$price, items=$items)';
|
||||
}
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
return AssetSerializer.toMap(this);
|
||||
}
|
||||
}
|
||||
|
||||
// **************************************************************************
|
||||
// SerializerGenerator
|
||||
// **************************************************************************
|
||||
|
||||
/// Shared, const instance of [ItemSerializer] for convenience.
const ItemSerializer itemSerializer = ItemSerializer();
|
||||
|
||||
/// Converts an [Item] model into its serialized [Map] representation.
class ItemEncoder extends Converter<Item, Map> {
  const ItemEncoder();

  @override
  Map convert(Item model) {
    // Delegate to the static serializer so there is one source of truth.
    return ItemSerializer.toMap(model);
  }
}
|
||||
|
||||
/// Reconstructs an [Item] model from its serialized [Map] representation.
class ItemDecoder extends Converter<Map, Item> {
  const ItemDecoder();

  @override
  Item convert(Map map) {
    // Delegate to the static deserializer so there is one source of truth.
    return ItemSerializer.fromMap(map);
  }
}
|
||||
|
||||
/// Codec bridging [Item] models and plain [Map]s.
class ItemSerializer extends Codec<Item, Map> {
  const ItemSerializer();

  @override
  ItemEncoder get encoder => const ItemEncoder();

  @override
  ItemDecoder get decoder => const ItemDecoder();

  /// Builds an [Item] from the serialized [map].
  static Item fromMap(Map map) {
    // Normalizes a timestamp that may arrive as DateTime, String, or null.
    DateTime? readTimestamp(dynamic value) {
      if (value == null) return null;
      return value is DateTime ? value : DateTime.parse(value.toString());
    }

    return Item(
        id: map['id'] as String?,
        createdAt: readTimestamp(map['created_at']),
        updatedAt: readTimestamp(map['updated_at']),
        description: map['description'] as String);
  }

  /// Serializes [model] to a map.
  ///
  /// Throws a [FormatException] when [model] is null.
  static Map<String, dynamic> toMap(_Item? model) {
    if (model == null) {
      throw FormatException("Required field [model] cannot be null");
    }
    return {
      'id': model.id,
      'created_at': model.createdAt?.toIso8601String(),
      'updated_at': model.updatedAt?.toIso8601String(),
      'description': model.description
    };
  }
}
|
||||
|
||||
/// Column-name constants for the serialized form of `Item`.
abstract class ItemFields {
  // All serialized field names, in declaration order.
  static const List<String> allFields = <String>[
    id,
    createdAt,
    updatedAt,
    description
  ];

  static const String id = 'id';

  static const String createdAt = 'created_at';

  static const String updatedAt = 'updated_at';

  static const String description = 'description';
}
|
||||
|
||||
/// Shared, const instance of [AssetSerializer] for convenience.
const AssetSerializer assetSerializer = AssetSerializer();
|
||||
|
||||
/// Converts an [Asset] model into its serialized [Map] representation.
class AssetEncoder extends Converter<Asset, Map> {
  const AssetEncoder();

  @override
  Map convert(Asset model) {
    // Delegate to the static serializer so there is one source of truth.
    return AssetSerializer.toMap(model);
  }
}
|
||||
|
||||
/// Reconstructs an [Asset] model from its serialized [Map] representation.
class AssetDecoder extends Converter<Map, Asset> {
  const AssetDecoder();

  @override
  Asset convert(Map map) {
    // Delegate to the static deserializer so there is one source of truth.
    return AssetSerializer.fromMap(map);
  }
}
|
||||
|
||||
/// Codec bridging [Asset] models and plain [Map]s.
class AssetSerializer extends Codec<Asset, Map> {
  const AssetSerializer();

  @override
  AssetEncoder get encoder => const AssetEncoder();

  @override
  AssetDecoder get decoder => const AssetDecoder();

  /// Builds an [Asset] from the serialized [map].
  ///
  /// Accepts either [DateTime] instances or ISO-8601 strings for the
  /// timestamp fields, and either `int` or `double` for `price` (JSON
  /// decoders produce `int` for whole numbers, which the previous
  /// `as double` cast rejected at runtime).
  static Asset fromMap(Map map) {
    // Normalizes a timestamp that may arrive as DateTime, String, or null.
    DateTime? readTimestamp(dynamic value) {
      if (value == null) return null;
      return value is DateTime ? value : DateTime.parse(value.toString());
    }

    return Asset(
        id: map['id'] as String?,
        createdAt: readTimestamp(map['created_at']),
        updatedAt: readTimestamp(map['updated_at']),
        description: map['description'] as String,
        name: map['name'] as String,
        // `num` tolerates JSON integers (e.g. {"price": 3}) as well as doubles.
        price: (map['price'] as num).toDouble(),
        items: map['items'] is Iterable
            ? List.unmodifiable(((map['items'] as Iterable).whereType<Map>())
                .map(ItemSerializer.fromMap))
            : []);
  }

  /// Serializes [model] to a map.
  ///
  /// Throws a [FormatException] when [model] is null.
  static Map<String, dynamic> toMap(_Asset? model) {
    if (model == null) {
      throw FormatException("Required field [model] cannot be null");
    }
    return {
      'id': model.id,
      'created_at': model.createdAt?.toIso8601String(),
      'updated_at': model.updatedAt?.toIso8601String(),
      'description': model.description,
      'name': model.name,
      'price': model.price,
      'items': model.items.map((m) => ItemSerializer.toMap(m)).toList()
    };
  }
}
|
||||
|
||||
/// Column-name constants for the serialized form of `Asset`.
abstract class AssetFields {
  // All serialized field names, in declaration order.
  static const List<String> allFields = <String>[
    id,
    createdAt,
    updatedAt,
    description,
    name,
    price,
    items
  ];

  static const String id = 'id';

  static const String createdAt = 'created_at';

  static const String updatedAt = 'updated_at';

  static const String description = 'description';

  static const String name = 'name';

  static const String price = 'price';

  static const String items = 'items';
}
|
22
packages/orm/angel_orm_test/lib/src/models/bike.dart
Normal file
22
packages/orm/angel_orm_test/lib/src/models/bike.dart
Normal file
|
@ -0,0 +1,22 @@
|
|||
import 'package:angel3_orm/angel3_orm.dart';
|
||||
import 'package:angel3_serialize/angel3_serialize.dart';
|
||||
import 'package:angel3_migration/angel3_migration.dart';
|
||||
import 'package:optional/optional.dart';
|
||||
|
||||
part 'bike.g.dart';
|
||||
|
||||
/// Source-of-truth model definition for the generated `Bike` entity.
///
/// Code generation (see `bike.g.dart`) derives the concrete [Bike] class,
/// its serializer, and the ORM query/migration classes from these getters.
@serializable
@orm
abstract class _Bike extends Model {
  String get make;

  String get description;

  bool get familyFriendly;

  DateTime get recalledAt;

  double get price;

  int get width;
}
|
439
packages/orm/angel_orm_test/lib/src/models/bike.g.dart
Normal file
439
packages/orm/angel_orm_test/lib/src/models/bike.g.dart
Normal file
|
@ -0,0 +1,439 @@
|
|||
// GENERATED CODE - DO NOT MODIFY BY HAND
|
||||
|
||||
part of 'bike.dart';
|
||||
|
||||
// **************************************************************************
|
||||
// MigrationGenerator
|
||||
// **************************************************************************
|
||||
|
||||
/// Schema migration for the `bikes` table.
class BikeMigration extends Migration {
  /// Creates the `bikes` table with one column per `_Bike` field.
  @override
  void up(Schema schema) {
    schema.create('bikes', (table) {
      table
        ..serial('id').primaryKey()
        ..timeStamp('created_at')
        ..timeStamp('updated_at')
        ..varChar('make', length: 255)
        ..varChar('description', length: 255)
        ..boolean('family_friendly')
        ..timeStamp('recalled_at')
        ..double('price')
        ..integer('width');
    });
  }

  /// Reverses [up] by dropping the table.
  @override
  void down(Schema schema) {
    schema.drop('bikes');
  }
}
|
||||
|
||||
// **************************************************************************
|
||||
// OrmGenerator
|
||||
// **************************************************************************
|
||||
|
||||
/// Generated ORM query builder for the `bikes` table, producing [Bike] rows.
class BikeQuery extends Query<Bike, BikeQueryWhere> {
  BikeQuery({Query? parent, Set<String>? trampoline}) : super(parent: parent) {
    trampoline ??= <String>{};
    // Record this table so self-referencing joins do not recurse forever.
    trampoline.add(tableName);
    _where = BikeQueryWhere(this);
  }

  @override
  final BikeQueryValues values = BikeQueryValues();

  // Non-empty after select() has restricted the projected columns.
  List<String> _selectedFields = [];

  BikeQueryWhere? _where;

  @override
  Map<String, String> get casts {
    // 'price' is read back as text and parsed client-side (see BikeQueryValues).
    return {'price': 'char'};
  }

  @override
  String get tableName {
    return 'bikes';
  }

  @override
  List<String> get fields {
    // NOTE: column order here must match the row indices used in parseRow.
    const _fields = [
      'id',
      'created_at',
      'updated_at',
      'make',
      'description',
      'family_friendly',
      'recalled_at',
      'price',
      'width'
    ];
    return _selectedFields.isEmpty
        ? _fields
        : _fields.where((field) => _selectedFields.contains(field)).toList();
  }

  /// Restricts the query to [selectedFields]; returns `this` for chaining.
  BikeQuery select(List<String> selectedFields) {
    _selectedFields = selectedFields;
    return this;
  }

  @override
  BikeQueryWhere? get where {
    return _where;
  }

  @override
  BikeQueryWhere newWhereClause() {
    return BikeQueryWhere(this);
  }

  /// Maps a raw result [row] to a [Bike]; empty when every cell is null
  /// (e.g. the missing side of an outer join).
  Optional<Bike> parseRow(List row) {
    if (row.every((x) => x == null)) {
      return Optional.empty();
    }
    var model = Bike(
        id: fields.contains('id') ? row[0].toString() : null,
        createdAt: fields.contains('created_at')
            ? mapToNullableDateTime(row[1])
            : null,
        updatedAt: fields.contains('updated_at')
            ? mapToNullableDateTime(row[2])
            : null,
        make: fields.contains('make') ? (row[3] as String) : '',
        description: fields.contains('description') ? (row[4] as String) : '',
        familyFriendly:
            fields.contains('family_friendly') ? mapToBool(row[5]) : false,
        recalledAt: fields.contains('recalled_at')
            ? mapToDateTime(row[6])
            // Unix-epoch sentinel for a non-selected, non-nullable column.
            : DateTime.parse("1970-01-01 00:00:00"),
        price: fields.contains('price') ? mapToDouble(row[7]) : 0.0,
        width: fields.contains('width') ? (row[8] as int) : 0);
    return Optional.of(model);
  }

  @override
  Optional<Bike> deserialize(List row) {
    return parseRow(row);
  }
}
|
||||
|
||||
/// Typed WHERE-clause builder for [BikeQuery]; one expression builder
/// per `bikes` column.
class BikeQueryWhere extends QueryWhere {
  BikeQueryWhere(BikeQuery query)
      : id = NumericSqlExpressionBuilder<int>(query, 'id'),
        createdAt = DateTimeSqlExpressionBuilder(query, 'created_at'),
        updatedAt = DateTimeSqlExpressionBuilder(query, 'updated_at'),
        make = StringSqlExpressionBuilder(query, 'make'),
        description = StringSqlExpressionBuilder(query, 'description'),
        familyFriendly = BooleanSqlExpressionBuilder(query, 'family_friendly'),
        recalledAt = DateTimeSqlExpressionBuilder(query, 'recalled_at'),
        price = NumericSqlExpressionBuilder<double>(query, 'price'),
        width = NumericSqlExpressionBuilder<int>(query, 'width');

  final NumericSqlExpressionBuilder<int> id;

  final DateTimeSqlExpressionBuilder createdAt;

  final DateTimeSqlExpressionBuilder updatedAt;

  final StringSqlExpressionBuilder make;

  final StringSqlExpressionBuilder description;

  final BooleanSqlExpressionBuilder familyFriendly;

  final DateTimeSqlExpressionBuilder recalledAt;

  final NumericSqlExpressionBuilder<double> price;

  final NumericSqlExpressionBuilder<int> width;

  @override
  List<SqlExpressionBuilder> get expressionBuilders {
    // Order mirrors the column declaration order of the table.
    return [
      id,
      createdAt,
      updatedAt,
      make,
      description,
      familyFriendly,
      recalledAt,
      price,
      width
    ];
  }
}
|
||||
|
||||
/// Typed accessors over the column/value map used for INSERT/UPDATE
/// statements against `bikes`.
class BikeQueryValues extends MapQueryValues {
  /// SQL cast applied when writing: `price` travels as its string form.
  @override
  Map<String, String> get casts => {'price': 'double precision'};

  String? get id => values['id'] as String?;
  set id(String? value) => values['id'] = value;

  DateTime? get createdAt => values['created_at'] as DateTime?;
  set createdAt(DateTime? value) => values['created_at'] = value;

  DateTime? get updatedAt => values['updated_at'] as DateTime?;
  set updatedAt(DateTime? value) => values['updated_at'] = value;

  String get make => values['make'] as String;
  set make(String value) => values['make'] = value;

  String get description => values['description'] as String;
  set description(String value) => values['description'] = value;

  bool get familyFriendly => values['family_friendly'] as bool;
  set familyFriendly(bool value) => values['family_friendly'] = value;

  DateTime get recalledAt => values['recalled_at'] as DateTime;
  set recalledAt(DateTime value) => values['recalled_at'] = value;

  // Stored as a string (see [casts]); parsed back on read, 0.0 on failure.
  double get price => double.tryParse(values['price'] as String) ?? 0.0;
  set price(double value) => values['price'] = value.toString();

  int get width => values['width'] as int;
  set width(int value) => values['width'] = value;

  /// Copies every non-id column from [model] into this value map.
  void copyFrom(Bike model) {
    createdAt = model.createdAt;
    updatedAt = model.updatedAt;
    make = model.make;
    description = model.description;
    familyFriendly = model.familyFriendly;
    recalledAt = model.recalledAt;
    price = model.price;
    width = model.width;
  }
}
|
||||
|
||||
// **************************************************************************
|
||||
// JsonModelGenerator
|
||||
// **************************************************************************
|
||||
|
||||
/// Concrete, serializable implementation of the `_Bike` model.
@generatedSerializable
class Bike extends _Bike {
  Bike(
      {this.id,
      this.createdAt,
      this.updatedAt,
      required this.make,
      required this.description,
      required this.familyFriendly,
      required this.recalledAt,
      required this.price,
      required this.width});

  /// A unique identifier corresponding to this item.
  @override
  String? id;

  /// The time at which this item was created.
  @override
  DateTime? createdAt;

  /// The last time at which this item was updated.
  @override
  DateTime? updatedAt;

  @override
  String make;

  @override
  String description;

  @override
  bool familyFriendly;

  @override
  DateTime recalledAt;

  @override
  double price;

  @override
  int width;

  /// Returns a new [Bike] with the given fields replaced; unspecified
  /// fields keep this instance's values.
  Bike copyWith(
      {String? id,
      DateTime? createdAt,
      DateTime? updatedAt,
      String? make,
      String? description,
      bool? familyFriendly,
      DateTime? recalledAt,
      double? price,
      int? width}) {
    return Bike(
        id: id ?? this.id,
        createdAt: createdAt ?? this.createdAt,
        updatedAt: updatedAt ?? this.updatedAt,
        make: make ?? this.make,
        description: description ?? this.description,
        familyFriendly: familyFriendly ?? this.familyFriendly,
        recalledAt: recalledAt ?? this.recalledAt,
        price: price ?? this.price,
        width: width ?? this.width);
  }

  // Field-by-field value equality against any _Bike implementation;
  // kept in sync with [hashCode] below.
  @override
  bool operator ==(other) {
    return other is _Bike &&
        other.id == id &&
        other.createdAt == createdAt &&
        other.updatedAt == updatedAt &&
        other.make == make &&
        other.description == description &&
        other.familyFriendly == familyFriendly &&
        other.recalledAt == recalledAt &&
        other.price == price &&
        other.width == width;
  }

  @override
  int get hashCode {
    return hashObjects([
      id,
      createdAt,
      updatedAt,
      make,
      description,
      familyFriendly,
      recalledAt,
      price,
      width
    ]);
  }

  @override
  String toString() {
    return 'Bike(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, make=$make, description=$description, familyFriendly=$familyFriendly, recalledAt=$recalledAt, price=$price, width=$width)';
  }

  /// Serializes this model via [BikeSerializer].
  Map<String, dynamic> toJson() {
    return BikeSerializer.toMap(this);
  }
}
|
||||
|
||||
// **************************************************************************
|
||||
// SerializerGenerator
|
||||
// **************************************************************************
|
||||
|
||||
/// Shared, const instance of [BikeSerializer] for convenience.
const BikeSerializer bikeSerializer = BikeSerializer();
|
||||
|
||||
/// Converts a [Bike] model into its serialized [Map] representation.
class BikeEncoder extends Converter<Bike, Map> {
  const BikeEncoder();

  @override
  Map convert(Bike model) {
    // Delegate to the static serializer so there is one source of truth.
    return BikeSerializer.toMap(model);
  }
}
|
||||
|
||||
/// Reconstructs a [Bike] model from its serialized [Map] representation.
class BikeDecoder extends Converter<Map, Bike> {
  const BikeDecoder();

  @override
  Bike convert(Map map) {
    // Delegate to the static deserializer so there is one source of truth.
    return BikeSerializer.fromMap(map);
  }
}
|
||||
|
||||
/// Codec bridging [Bike] models and plain [Map]s.
class BikeSerializer extends Codec<Bike, Map> {
  const BikeSerializer();

  @override
  BikeEncoder get encoder => const BikeEncoder();

  @override
  BikeDecoder get decoder => const BikeDecoder();

  /// Builds a [Bike] from the serialized [map].
  ///
  /// Accepts either [DateTime] instances or ISO-8601 strings for the
  /// timestamp fields, and either `int` or `double` for `price` (JSON
  /// decoders produce `int` for whole numbers, which the previous
  /// `as double` cast rejected at runtime).
  static Bike fromMap(Map map) {
    // Normalizes a timestamp that may arrive as DateTime, String, or null.
    DateTime? readTimestamp(dynamic value) {
      if (value == null) return null;
      return value is DateTime ? value : DateTime.parse(value.toString());
    }

    return Bike(
        id: map['id'] as String?,
        createdAt: readTimestamp(map['created_at']),
        updatedAt: readTimestamp(map['updated_at']),
        make: map['make'] as String,
        description: map['description'] as String,
        familyFriendly: map['family_friendly'] as bool,
        // Non-nullable column: fall back to the Unix-epoch sentinel.
        recalledAt: readTimestamp(map['recalled_at']) ??
            DateTime.parse("1970-01-01 00:00:00"),
        // `num` tolerates JSON integers (e.g. {"price": 3}) as well as doubles.
        price: (map['price'] as num).toDouble(),
        width: map['width'] as int);
  }

  /// Serializes [model] to a map.
  ///
  /// Throws a [FormatException] when [model] is null.
  static Map<String, dynamic> toMap(_Bike? model) {
    if (model == null) {
      throw FormatException("Required field [model] cannot be null");
    }
    return {
      'id': model.id,
      'created_at': model.createdAt?.toIso8601String(),
      'updated_at': model.updatedAt?.toIso8601String(),
      'make': model.make,
      'description': model.description,
      'family_friendly': model.familyFriendly,
      'recalled_at': model.recalledAt.toIso8601String(),
      'price': model.price,
      'width': model.width
    };
  }
}
|
||||
|
||||
/// Column-name constants for the serialized form of `Bike`.
abstract class BikeFields {
  // All serialized field names, in declaration order.
  static const List<String> allFields = <String>[
    id,
    createdAt,
    updatedAt,
    make,
    description,
    familyFriendly,
    recalledAt,
    price,
    width
  ];

  static const String id = 'id';

  static const String createdAt = 'created_at';

  static const String updatedAt = 'updated_at';

  static const String make = 'make';

  static const String description = 'description';

  static const String familyFriendly = 'family_friendly';

  static const String recalledAt = 'recalled_at';

  static const String price = 'price';

  static const String width = 'width';
}
|
14
packages/orm/angel_orm_test/lib/src/models/boat.d.ts
vendored
Normal file
14
packages/orm/angel_orm_test/lib/src/models/boat.d.ts
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
// GENERATED CODE - DO NOT MODIFY BY HAND
declare module 'angel3_orm_test' {
  // Serialized (snake_case) wire shape of the Boat model; field names
  // mirror the keys produced by the Dart BoatSerializer.toMap output.
  interface Boat {
    id?: string;
    created_at?: any;
    updated_at?: any;
    make?: string;
    description?: string;
    family_friendly?: boolean;
    recalled_at?: any;
    price?: number;
    width?: number;
  }
}
|
28
packages/orm/angel_orm_test/lib/src/models/boat.dart
Normal file
28
packages/orm/angel_orm_test/lib/src/models/boat.dart
Normal file
|
@ -0,0 +1,28 @@
|
|||
import 'package:angel3_orm/angel3_orm.dart';
|
||||
import 'package:angel3_serialize/angel3_serialize.dart';
|
||||
import 'package:angel3_migration/angel3_migration.dart';
|
||||
import 'package:optional/optional.dart';
|
||||
|
||||
part 'boat.g.dart';
|
||||
|
||||
/// Source-of-truth model definition for the generated `Boat` entity.
///
/// Like `_Bike`, but each column declares a default value via [Column];
/// code generation (see `boat.g.dart`) emits the matching `defaultsTo`
/// calls in the migration.
@Serializable(serializers: Serializers.all)
@orm
abstract class _Boat extends Model {
  @Column(defaultValue: '')
  String get make;

  @Column(defaultValue: 'none')
  String get description;

  @Column(defaultValue: false)
  bool get familyFriendly;

  //@SerializableField(defaultValue: '1970-01-01 00:00:01')
  DateTime get recalledAt;

  @Column(defaultValue: 0.0)
  double get price;

  @Column(defaultValue: 0)
  int get width;
}
|
439
packages/orm/angel_orm_test/lib/src/models/boat.g.dart
Normal file
439
packages/orm/angel_orm_test/lib/src/models/boat.g.dart
Normal file
|
@ -0,0 +1,439 @@
|
|||
// GENERATED CODE - DO NOT MODIFY BY HAND
|
||||
|
||||
part of 'boat.dart';
|
||||
|
||||
// **************************************************************************
|
||||
// MigrationGenerator
|
||||
// **************************************************************************
|
||||
|
||||
/// Schema migration for the `boats` table.
class BoatMigration extends Migration {
  /// Creates the `boats` table, applying the column defaults declared
  /// on the `_Boat` model.
  @override
  void up(Schema schema) {
    schema.create('boats', (table) {
      table
        ..serial('id').primaryKey()
        ..timeStamp('created_at')
        ..timeStamp('updated_at')
        ..varChar('make', length: 255).defaultsTo('')
        ..varChar('description', length: 255).defaultsTo('none')
        ..boolean('family_friendly').defaultsTo(false)
        ..timeStamp('recalled_at')
        ..double('price').defaultsTo(0.0)
        ..integer('width').defaultsTo(0);
    });
  }

  /// Reverses [up] by dropping the table.
  @override
  void down(Schema schema) {
    schema.drop('boats');
  }
}
|
||||
|
||||
// **************************************************************************
|
||||
// OrmGenerator
|
||||
// **************************************************************************
|
||||
|
||||
/// Generated ORM query builder for the `boats` table, producing [Boat] rows.
class BoatQuery extends Query<Boat, BoatQueryWhere> {
  BoatQuery({Query? parent, Set<String>? trampoline}) : super(parent: parent) {
    trampoline ??= <String>{};
    // Record this table so self-referencing joins do not recurse forever.
    trampoline.add(tableName);
    _where = BoatQueryWhere(this);
  }

  @override
  final BoatQueryValues values = BoatQueryValues();

  // Non-empty after select() has restricted the projected columns.
  List<String> _selectedFields = [];

  BoatQueryWhere? _where;

  @override
  Map<String, String> get casts {
    // 'price' is read back as text and parsed client-side (see BoatQueryValues).
    return {'price': 'char'};
  }

  @override
  String get tableName {
    return 'boats';
  }

  @override
  List<String> get fields {
    // NOTE: column order here must match the row indices used in parseRow.
    const _fields = [
      'id',
      'created_at',
      'updated_at',
      'make',
      'description',
      'family_friendly',
      'recalled_at',
      'price',
      'width'
    ];
    return _selectedFields.isEmpty
        ? _fields
        : _fields.where((field) => _selectedFields.contains(field)).toList();
  }

  /// Restricts the query to [selectedFields]; returns `this` for chaining.
  BoatQuery select(List<String> selectedFields) {
    _selectedFields = selectedFields;
    return this;
  }

  @override
  BoatQueryWhere? get where {
    return _where;
  }

  @override
  BoatQueryWhere newWhereClause() {
    return BoatQueryWhere(this);
  }

  /// Maps a raw result [row] to a [Boat]; empty when every cell is null
  /// (e.g. the missing side of an outer join).
  Optional<Boat> parseRow(List row) {
    if (row.every((x) => x == null)) {
      return Optional.empty();
    }
    var model = Boat(
        id: fields.contains('id') ? row[0].toString() : null,
        createdAt: fields.contains('created_at')
            ? mapToNullableDateTime(row[1])
            : null,
        updatedAt: fields.contains('updated_at')
            ? mapToNullableDateTime(row[2])
            : null,
        make: fields.contains('make') ? (row[3] as String) : '',
        description: fields.contains('description') ? (row[4] as String) : '',
        familyFriendly:
            fields.contains('family_friendly') ? mapToBool(row[5]) : false,
        recalledAt: fields.contains('recalled_at')
            ? mapToDateTime(row[6])
            // Unix-epoch sentinel for a non-selected, non-nullable column.
            : DateTime.parse("1970-01-01 00:00:00"),
        price: fields.contains('price') ? mapToDouble(row[7]) : 0.0,
        width: fields.contains('width') ? (row[8] as int) : 0);
    return Optional.of(model);
  }

  @override
  Optional<Boat> deserialize(List row) {
    return parseRow(row);
  }
}
|
||||
|
||||
/// Typed WHERE-clause builder for [BoatQuery]; one expression builder
/// per `boats` column.
class BoatQueryWhere extends QueryWhere {
  BoatQueryWhere(BoatQuery query)
      : id = NumericSqlExpressionBuilder<int>(query, 'id'),
        createdAt = DateTimeSqlExpressionBuilder(query, 'created_at'),
        updatedAt = DateTimeSqlExpressionBuilder(query, 'updated_at'),
        make = StringSqlExpressionBuilder(query, 'make'),
        description = StringSqlExpressionBuilder(query, 'description'),
        familyFriendly = BooleanSqlExpressionBuilder(query, 'family_friendly'),
        recalledAt = DateTimeSqlExpressionBuilder(query, 'recalled_at'),
        price = NumericSqlExpressionBuilder<double>(query, 'price'),
        width = NumericSqlExpressionBuilder<int>(query, 'width');

  final NumericSqlExpressionBuilder<int> id;

  final DateTimeSqlExpressionBuilder createdAt;

  final DateTimeSqlExpressionBuilder updatedAt;

  final StringSqlExpressionBuilder make;

  final StringSqlExpressionBuilder description;

  final BooleanSqlExpressionBuilder familyFriendly;

  final DateTimeSqlExpressionBuilder recalledAt;

  final NumericSqlExpressionBuilder<double> price;

  final NumericSqlExpressionBuilder<int> width;

  @override
  List<SqlExpressionBuilder> get expressionBuilders {
    // Order mirrors the column declaration order of the table.
    return [
      id,
      createdAt,
      updatedAt,
      make,
      description,
      familyFriendly,
      recalledAt,
      price,
      width
    ];
  }
}
|
||||
|
||||
/// Typed accessors over the column/value map used for INSERT/UPDATE
/// statements against `boats`.
class BoatQueryValues extends MapQueryValues {
  /// SQL cast applied when writing: `price` travels as its string form.
  @override
  Map<String, String> get casts => {'price': 'double precision'};

  String? get id => values['id'] as String?;
  set id(String? value) => values['id'] = value;

  DateTime? get createdAt => values['created_at'] as DateTime?;
  set createdAt(DateTime? value) => values['created_at'] = value;

  DateTime? get updatedAt => values['updated_at'] as DateTime?;
  set updatedAt(DateTime? value) => values['updated_at'] = value;

  String get make => values['make'] as String;
  set make(String value) => values['make'] = value;

  String get description => values['description'] as String;
  set description(String value) => values['description'] = value;

  bool get familyFriendly => values['family_friendly'] as bool;
  set familyFriendly(bool value) => values['family_friendly'] = value;

  DateTime get recalledAt => values['recalled_at'] as DateTime;
  set recalledAt(DateTime value) => values['recalled_at'] = value;

  // Stored as a string (see [casts]); parsed back on read, 0.0 on failure.
  double get price => double.tryParse(values['price'] as String) ?? 0.0;
  set price(double value) => values['price'] = value.toString();

  int get width => values['width'] as int;
  set width(int value) => values['width'] = value;

  /// Copies every non-id column from [model] into this value map.
  void copyFrom(Boat model) {
    createdAt = model.createdAt;
    updatedAt = model.updatedAt;
    make = model.make;
    description = model.description;
    familyFriendly = model.familyFriendly;
    recalledAt = model.recalledAt;
    price = model.price;
    width = model.width;
  }
}
|
||||
|
||||
// **************************************************************************
|
||||
// JsonModelGenerator
|
||||
// **************************************************************************
|
||||
|
||||
/// Concrete, serializable implementation of the `_Boat` model.
@generatedSerializable
class Boat extends _Boat {
  Boat(
      {this.id,
      this.createdAt,
      this.updatedAt,
      required this.make,
      required this.description,
      required this.familyFriendly,
      required this.recalledAt,
      required this.price,
      required this.width});

  /// A unique identifier corresponding to this item.
  @override
  String? id;

  /// The time at which this item was created.
  @override
  DateTime? createdAt;

  /// The last time at which this item was updated.
  @override
  DateTime? updatedAt;

  @override
  String make;

  @override
  String description;

  @override
  bool familyFriendly;

  @override
  DateTime recalledAt;

  @override
  double price;

  @override
  int width;

  /// Returns a new [Boat] with the given fields replaced; unspecified
  /// fields keep this instance's values.
  Boat copyWith(
      {String? id,
      DateTime? createdAt,
      DateTime? updatedAt,
      String? make,
      String? description,
      bool? familyFriendly,
      DateTime? recalledAt,
      double? price,
      int? width}) {
    return Boat(
        id: id ?? this.id,
        createdAt: createdAt ?? this.createdAt,
        updatedAt: updatedAt ?? this.updatedAt,
        make: make ?? this.make,
        description: description ?? this.description,
        familyFriendly: familyFriendly ?? this.familyFriendly,
        recalledAt: recalledAt ?? this.recalledAt,
        price: price ?? this.price,
        width: width ?? this.width);
  }

  // Field-by-field value equality against any _Boat implementation;
  // kept in sync with [hashCode] below.
  @override
  bool operator ==(other) {
    return other is _Boat &&
        other.id == id &&
        other.createdAt == createdAt &&
        other.updatedAt == updatedAt &&
        other.make == make &&
        other.description == description &&
        other.familyFriendly == familyFriendly &&
        other.recalledAt == recalledAt &&
        other.price == price &&
        other.width == width;
  }

  @override
  int get hashCode {
    return hashObjects([
      id,
      createdAt,
      updatedAt,
      make,
      description,
      familyFriendly,
      recalledAt,
      price,
      width
    ]);
  }

  @override
  String toString() {
    return 'Boat(id=$id, createdAt=$createdAt, updatedAt=$updatedAt, make=$make, description=$description, familyFriendly=$familyFriendly, recalledAt=$recalledAt, price=$price, width=$width)';
  }

  /// Serializes this model via [BoatSerializer].
  Map<String, dynamic> toJson() {
    return BoatSerializer.toMap(this);
  }
}
|
||||
|
||||
// **************************************************************************
|
||||
// SerializerGenerator
|
||||
// **************************************************************************
|
||||
|
||||
/// Shared, const instance of [BoatSerializer] for convenience.
const BoatSerializer boatSerializer = BoatSerializer();
|
||||
|
||||
/// Converts a [Boat] model into its serialized [Map] representation.
class BoatEncoder extends Converter<Boat, Map> {
  const BoatEncoder();

  @override
  Map convert(Boat model) {
    // Delegate to the static serializer so there is one source of truth.
    return BoatSerializer.toMap(model);
  }
}
|
||||
|
||||
/// Reconstructs a [Boat] model from its serialized [Map] representation.
class BoatDecoder extends Converter<Map, Boat> {
  const BoatDecoder();

  @override
  Boat convert(Map map) {
    // Delegate to the static deserializer so there is one source of truth.
    return BoatSerializer.fromMap(map);
  }
}
|
||||
|
||||
/// Codec bridging [Boat] models and plain [Map]s.
class BoatSerializer extends Codec<Boat, Map> {
  const BoatSerializer();

  @override
  BoatEncoder get encoder => const BoatEncoder();

  @override
  BoatDecoder get decoder => const BoatDecoder();

  /// Builds a [Boat] from the serialized [map].
  ///
  /// Accepts either [DateTime] instances or ISO-8601 strings for the
  /// timestamp fields, and either `int` or `double` for `price` (JSON
  /// decoders produce `int` for whole numbers, which the previous
  /// `as double` cast rejected at runtime).
  static Boat fromMap(Map map) {
    // Normalizes a timestamp that may arrive as DateTime, String, or null.
    DateTime? readTimestamp(dynamic value) {
      if (value == null) return null;
      return value is DateTime ? value : DateTime.parse(value.toString());
    }

    return Boat(
        id: map['id'] as String?,
        createdAt: readTimestamp(map['created_at']),
        updatedAt: readTimestamp(map['updated_at']),
        make: map['make'] as String,
        description: map['description'] as String,
        familyFriendly: map['family_friendly'] as bool,
        // Non-nullable column: fall back to the Unix-epoch sentinel.
        recalledAt: readTimestamp(map['recalled_at']) ??
            DateTime.parse("1970-01-01 00:00:00"),
        // `num` tolerates JSON integers (e.g. {"price": 3}) as well as doubles.
        price: (map['price'] as num).toDouble(),
        width: map['width'] as int);
  }

  /// Serializes [model] to a map.
  ///
  /// Throws a [FormatException] when [model] is null.
  static Map<String, dynamic> toMap(_Boat? model) {
    if (model == null) {
      throw FormatException("Required field [model] cannot be null");
    }
    return {
      'id': model.id,
      'created_at': model.createdAt?.toIso8601String(),
      'updated_at': model.updatedAt?.toIso8601String(),
      'make': model.make,
      'description': model.description,
      'family_friendly': model.familyFriendly,
      'recalled_at': model.recalledAt.toIso8601String(),
      'price': model.price,
      'width': model.width
    };
  }
}
|
||||
|
||||
/// Column-name constants for the boat table.
abstract class BoatFields {
  static const String id = 'id';

  static const String createdAt = 'created_at';

  static const String updatedAt = 'updated_at';

  static const String make = 'make';

  static const String description = 'description';

  static const String familyFriendly = 'family_friendly';

  static const String recalledAt = 'recalled_at';

  static const String price = 'price';

  static const String width = 'width';

  /// Every column name, in declaration (table) order.
  static const List<String> allFields = <String>[
    id,
    createdAt,
    updatedAt,
    make,
    description,
    familyFriendly,
    recalledAt,
    price,
    width
  ];
}
|
|
@ -122,8 +122,12 @@ class BookQuery extends Query<Book, BookQueryWhere> {
|
|||
}
|
||||
var model = Book(
|
||||
id: fields.contains('id') ? row[0].toString() : null,
|
||||
createdAt: fields.contains('created_at') ? mapToDateTime(row[1]) : null,
|
||||
updatedAt: fields.contains('updated_at') ? mapToDateTime(row[2]) : null,
|
||||
createdAt: fields.contains('created_at')
|
||||
? mapToNullableDateTime(row[1])
|
||||
: null,
|
||||
updatedAt: fields.contains('updated_at')
|
||||
? mapToNullableDateTime(row[2])
|
||||
: null,
|
||||
name: fields.contains('name') ? (row[5] as String?) : null);
|
||||
if (row.length > 6) {
|
||||
var modelOpt = AuthorQuery().parseRow(row.skip(6).take(4).toList());
|
||||
|
@ -285,8 +289,12 @@ class AuthorQuery extends Query<Author, AuthorQueryWhere> {
|
|||
}
|
||||
var model = Author(
|
||||
id: fields.contains('id') ? row[0].toString() : null,
|
||||
createdAt: fields.contains('created_at') ? mapToDateTime(row[1]) : null,
|
||||
updatedAt: fields.contains('updated_at') ? mapToDateTime(row[2]) : null,
|
||||
createdAt: fields.contains('created_at')
|
||||
? mapToNullableDateTime(row[1])
|
||||
: null,
|
||||
updatedAt: fields.contains('updated_at')
|
||||
? mapToNullableDateTime(row[2])
|
||||
: null,
|
||||
name: fields.contains('name') ? (row[3] as String?) : null);
|
||||
return Optional.of(model);
|
||||
}
|
||||
|
|
|
@ -91,15 +91,20 @@ class CarQuery extends Query<Car, CarQueryWhere> {
|
|||
}
|
||||
var model = Car(
|
||||
id: fields.contains('id') ? row[0].toString() : null,
|
||||
createdAt: fields.contains('created_at') ? mapToDateTime(row[1]) : null,
|
||||
updatedAt: fields.contains('updated_at') ? mapToDateTime(row[2]) : null,
|
||||
createdAt: fields.contains('created_at')
|
||||
? mapToNullableDateTime(row[1])
|
||||
: null,
|
||||
updatedAt: fields.contains('updated_at')
|
||||
? mapToNullableDateTime(row[2])
|
||||
: null,
|
||||
make: fields.contains('make') ? (row[3] as String?) : null,
|
||||
description:
|
||||
fields.contains('description') ? (row[4] as String?) : null,
|
||||
familyFriendly:
|
||||
fields.contains('family_friendly') ? mapToBool(row[5]) : null,
|
||||
recalledAt:
|
||||
fields.contains('recalled_at') ? mapToDateTime(row[6]) : null);
|
||||
recalledAt: fields.contains('recalled_at')
|
||||
? mapToNullableDateTime(row[6])
|
||||
: null);
|
||||
return Optional.of(model);
|
||||
}
|
||||
|
||||
|
|
|
@ -101,8 +101,12 @@ class NumbersQuery extends Query<Numbers, NumbersQueryWhere> {
|
|||
}
|
||||
var model = Numbers(
|
||||
id: fields.contains('id') ? row[0].toString() : null,
|
||||
createdAt: fields.contains('created_at') ? mapToDateTime(row[1]) : null,
|
||||
updatedAt: fields.contains('updated_at') ? mapToDateTime(row[2]) : null,
|
||||
createdAt: fields.contains('created_at')
|
||||
? mapToNullableDateTime(row[1])
|
||||
: null,
|
||||
updatedAt: fields.contains('updated_at')
|
||||
? mapToNullableDateTime(row[2])
|
||||
: null,
|
||||
two: fields.contains('two') ? (row[3] as int?) : null);
|
||||
return Optional.of(model);
|
||||
}
|
||||
|
@ -218,8 +222,12 @@ class AlphabetQuery extends Query<Alphabet, AlphabetQueryWhere> {
|
|||
}
|
||||
var model = Alphabet(
|
||||
id: fields.contains('id') ? row[0].toString() : null,
|
||||
createdAt: fields.contains('created_at') ? mapToDateTime(row[1]) : null,
|
||||
updatedAt: fields.contains('updated_at') ? mapToDateTime(row[2]) : null,
|
||||
createdAt: fields.contains('created_at')
|
||||
? mapToNullableDateTime(row[1])
|
||||
: null,
|
||||
updatedAt: fields.contains('updated_at')
|
||||
? mapToNullableDateTime(row[2])
|
||||
: null,
|
||||
value: fields.contains('value') ? (row[3] as String?) : null);
|
||||
if (row.length > 5) {
|
||||
var modelOpt = NumbersQuery().parseRow(row.skip(5).take(4).toList());
|
||||
|
|
|
@ -81,8 +81,12 @@ class HasCarQuery extends Query<HasCar, HasCarQueryWhere> {
|
|||
}
|
||||
var model = HasCar(
|
||||
id: fields.contains('id') ? row[0].toString() : null,
|
||||
createdAt: fields.contains('created_at') ? mapToDateTime(row[1]) : null,
|
||||
updatedAt: fields.contains('updated_at') ? mapToDateTime(row[2]) : null,
|
||||
createdAt: fields.contains('created_at')
|
||||
? mapToNullableDateTime(row[1])
|
||||
: null,
|
||||
updatedAt: fields.contains('updated_at')
|
||||
? mapToNullableDateTime(row[2])
|
||||
: null,
|
||||
type: fields.contains('type')
|
||||
? row[3] == null
|
||||
? null
|
||||
|
|
|
@ -31,8 +31,7 @@ class FootMigration extends Migration {
|
|||
table.timeStamp('created_at');
|
||||
table.timeStamp('updated_at');
|
||||
table.integer('leg_id');
|
||||
table.declareColumn(
|
||||
'n_toes', Column(type: ColumnType('decimal'), length: 255));
|
||||
table.double('n_toes');
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -111,8 +110,12 @@ class LegQuery extends Query<Leg, LegQueryWhere> {
|
|||
}
|
||||
var model = Leg(
|
||||
id: fields.contains('id') ? row[0].toString() : null,
|
||||
createdAt: fields.contains('created_at') ? mapToDateTime(row[1]) : null,
|
||||
updatedAt: fields.contains('updated_at') ? mapToDateTime(row[2]) : null,
|
||||
createdAt: fields.contains('created_at')
|
||||
? mapToNullableDateTime(row[1])
|
||||
: null,
|
||||
updatedAt: fields.contains('updated_at')
|
||||
? mapToNullableDateTime(row[2])
|
||||
: null,
|
||||
name: fields.contains('name') ? (row[3] as String?) : null);
|
||||
if (row.length > 4) {
|
||||
var modelOpt = FootQuery().parseRow(row.skip(4).take(5).toList());
|
||||
|
@ -240,8 +243,12 @@ class FootQuery extends Query<Foot, FootQueryWhere> {
|
|||
}
|
||||
var model = Foot(
|
||||
id: fields.contains('id') ? row[0].toString() : null,
|
||||
createdAt: fields.contains('created_at') ? mapToDateTime(row[1]) : null,
|
||||
updatedAt: fields.contains('updated_at') ? mapToDateTime(row[2]) : null,
|
||||
createdAt: fields.contains('created_at')
|
||||
? mapToNullableDateTime(row[1])
|
||||
: null,
|
||||
updatedAt: fields.contains('updated_at')
|
||||
? mapToNullableDateTime(row[2])
|
||||
: null,
|
||||
legId: fields.contains('leg_id') ? (row[3] as int?) : null,
|
||||
nToes: fields.contains('n_toes') ? mapToDouble(row[4]) : null);
|
||||
return Optional.of(model);
|
||||
|
@ -280,7 +287,7 @@ class FootQueryWhere extends QueryWhere {
|
|||
class FootQueryValues extends MapQueryValues {
|
||||
@override
|
||||
Map<String, String> get casts {
|
||||
return {'n_toes': 'decimal'};
|
||||
return {'n_toes': 'double precision'};
|
||||
}
|
||||
|
||||
String? get id {
|
||||
|
@ -304,7 +311,7 @@ class FootQueryValues extends MapQueryValues {
|
|||
|
||||
set legId(int? value) => values['leg_id'] = value;
|
||||
double? get nToes {
|
||||
return double.tryParse((values['n_toes'] as String));
|
||||
return double.tryParse((values['n_toes'] as String)) ?? 0.0;
|
||||
}
|
||||
|
||||
set nToes(double? value) => values['n_toes'] = value.toString();
|
||||
|
|
|
@ -115,10 +115,16 @@ class OrderQuery extends Query<Order, OrderQueryWhere> {
|
|||
}
|
||||
var model = Order(
|
||||
id: fields.contains('id') ? row[0].toString() : null,
|
||||
createdAt: fields.contains('created_at') ? mapToDateTime(row[1]) : null,
|
||||
updatedAt: fields.contains('updated_at') ? mapToDateTime(row[2]) : null,
|
||||
createdAt: fields.contains('created_at')
|
||||
? mapToNullableDateTime(row[1])
|
||||
: null,
|
||||
updatedAt: fields.contains('updated_at')
|
||||
? mapToNullableDateTime(row[2])
|
||||
: null,
|
||||
employeeId: fields.contains('employee_id') ? (row[4] as int?) : null,
|
||||
orderDate: fields.contains('order_date') ? mapToDateTime(row[5]) : null,
|
||||
orderDate: fields.contains('order_date')
|
||||
? mapToNullableDateTime(row[5])
|
||||
: null,
|
||||
shipperId: fields.contains('shipper_id') ? (row[6] as int?) : null);
|
||||
if (row.length > 7) {
|
||||
var modelOpt = CustomerQuery().parseRow(row.skip(7).take(3).toList());
|
||||
|
@ -284,9 +290,12 @@ class CustomerQuery extends Query<Customer, CustomerQueryWhere> {
|
|||
}
|
||||
var model = Customer(
|
||||
id: fields.contains('id') ? row[0].toString() : null,
|
||||
createdAt: fields.contains('created_at') ? mapToDateTime(row[1]) : null,
|
||||
updatedAt:
|
||||
fields.contains('updated_at') ? mapToDateTime(row[2]) : null);
|
||||
createdAt: fields.contains('created_at')
|
||||
? mapToNullableDateTime(row[1])
|
||||
: null,
|
||||
updatedAt: fields.contains('updated_at')
|
||||
? mapToNullableDateTime(row[2])
|
||||
: null);
|
||||
return Optional.of(model);
|
||||
}
|
||||
|
||||
|
|
|
@ -22,5 +22,5 @@ class _PersonWithLastOrder {
|
|||
String? lastOrderName;
|
||||
|
||||
@Column(expression: 'po.price')
|
||||
int? lastOrderPrice;
|
||||
double? lastOrderPrice;
|
||||
}
|
||||
|
|
|
@ -82,8 +82,12 @@ class PersonQuery extends Query<Person, PersonQueryWhere> {
|
|||
}
|
||||
var model = Person(
|
||||
id: fields.contains('id') ? row[0].toString() : null,
|
||||
createdAt: fields.contains('created_at') ? mapToDateTime(row[1]) : null,
|
||||
updatedAt: fields.contains('updated_at') ? mapToDateTime(row[2]) : null,
|
||||
createdAt: fields.contains('created_at')
|
||||
? mapToNullableDateTime(row[1])
|
||||
: null,
|
||||
updatedAt: fields.contains('updated_at')
|
||||
? mapToNullableDateTime(row[2])
|
||||
: null,
|
||||
name: fields.contains('name') ? (row[3] as String?) : null,
|
||||
age: fields.contains('age') ? (row[4] as int?) : null);
|
||||
return Optional.of(model);
|
||||
|
@ -179,7 +183,7 @@ class PersonWithLastOrderQuery
|
|||
|
||||
@override
|
||||
Map<String, String> get casts {
|
||||
return {};
|
||||
return {'last_order_price': 'char'};
|
||||
}
|
||||
|
||||
@override
|
||||
|
@ -219,7 +223,7 @@ class PersonWithLastOrderQuery
|
|||
lastOrderName:
|
||||
fields.contains('last_order_name') ? (row[1] as String?) : null,
|
||||
lastOrderPrice:
|
||||
fields.contains('last_order_price') ? (row[2] as int?) : null);
|
||||
fields.contains('last_order_price') ? mapToDouble(row[2]) : null);
|
||||
return Optional.of(model);
|
||||
}
|
||||
|
||||
|
@ -244,7 +248,7 @@ class PersonWithLastOrderQueryWhere extends QueryWhere {
|
|||
class PersonWithLastOrderQueryValues extends MapQueryValues {
|
||||
@override
|
||||
Map<String, String> get casts {
|
||||
return {};
|
||||
return {'last_order_price': 'double precision'};
|
||||
}
|
||||
|
||||
String? get name {
|
||||
|
@ -333,10 +337,10 @@ class PersonWithLastOrder extends _PersonWithLastOrder {
|
|||
String? lastOrderName;
|
||||
|
||||
@override
|
||||
int? lastOrderPrice;
|
||||
double? lastOrderPrice;
|
||||
|
||||
PersonWithLastOrder copyWith(
|
||||
{String? name, String? lastOrderName, int? lastOrderPrice}) {
|
||||
{String? name, String? lastOrderName, double? lastOrderPrice}) {
|
||||
return PersonWithLastOrder(
|
||||
name: name ?? this.name,
|
||||
lastOrderName: lastOrderName ?? this.lastOrderName,
|
||||
|
@ -474,7 +478,7 @@ class PersonWithLastOrderSerializer extends Codec<PersonWithLastOrder, Map> {
|
|||
return PersonWithLastOrder(
|
||||
name: map['name'] as String?,
|
||||
lastOrderName: map['last_order_name'] as String?,
|
||||
lastOrderPrice: map['last_order_price'] as int?);
|
||||
lastOrderPrice: map['last_order_price'] as double?);
|
||||
}
|
||||
|
||||
static Map<String, dynamic> toMap(_PersonWithLastOrder? model) {
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue