Updated ORM insert for MySQL
parent bab4e5eaf6
commit cd31c040ba
31 changed files with 226 additions and 98 deletions
@@ -35,6 +35,6 @@ dev_dependencies:
   io: ^1.0.0
   test: ^1.17.5
   lints: ^1.0.0
-dependency_overrides:
-  angel3_container:
-    path: ../container/angel_container
+#dependency_overrides:
+#  angel3_container:
+#    path: ../container/angel_container

@@ -26,8 +26,9 @@ class _FakeExecutor extends QueryExecutor {
 
   @override
   Future<List<List>> query(
-      String tableName, String? query, Map<String, dynamic> substitutionValues,
-      [returningFields = const []]) async {
+      String tableName, String query, Map<String, dynamic> substitutionValues,
+      {String returningQuery = '',
+      List<String> returningFields = const []}) async {
     var now = DateTime.now();
     print(
         '_FakeExecutor received query: $query and values: $substitutionValues');
@@ -40,6 +41,11 @@ class _FakeExecutor extends QueryExecutor {
   Future<T> transaction<T>(FutureOr<T> Function(QueryExecutor) f) {
     throw UnsupportedError('Transactions are not supported.');
   }
+
+  final Dialect _dialect = const PostgreSQLDialect();
+
+  @override
+  Dialect get dialect => _dialect;
 }
 
 @orm

@@ -13,3 +13,4 @@ export 'src/query.dart';
 export 'src/relations.dart';
 export 'src/union.dart';
 export 'src/util.dart';
+export 'src/dialect/dialect.dart';

packages/orm/angel_orm/lib/src/dialect/dialect.dart (new file, +25)
@@ -0,0 +1,25 @@
+abstract class Dialect {
+  bool get cteSupport;
+
+  bool get writableCteSupport;
+}
+
+class MySQLDialect implements Dialect {
+  const MySQLDialect();
+
+  @override
+  bool get cteSupport => true;
+
+  @override
+  bool get writableCteSupport => false;
+}
+
+class PostgreSQLDialect implements Dialect {
+  const PostgreSQLDialect();
+
+  @override
+  bool get cteSupport => true;
+
+  @override
+  bool get writableCteSupport => true;
+}

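Note: the new Dialect abstraction lets calling code ask an executor what its backend supports instead of assuming PostgreSQL. A minimal sketch of how a caller might branch on it (the helper below is hypothetical and not part of this commit):

import 'package:angel3_orm/angel3_orm.dart';

// Hypothetical helper: describes which insert strategy a backend allows.
String insertStrategyFor(QueryExecutor executor) {
  var dialect = executor.dialect;
  if (dialect.writableCteSupport) {
    // e.g. PostgreSQL: wrap the INSERT in a writable CTE and select it back.
    return 'writable-cte';
  } else if (dialect is MySQLDialect) {
    // e.g. MySQL: run the plain INSERT, then re-read the row by insertId.
    return 'insert-then-select';
  }
  return 'unsupported';
}
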
@@ -1,4 +1,5 @@
 import 'dart:async';
+import 'package:angel3_orm/angel3_orm.dart';
 import 'package:logging/logging.dart';
 
 import 'annotations.dart';
@@ -298,7 +299,7 @@ abstract class Query<T, Where extends QueryWhere> extends QueryBase<T> {
         }
         return ss;
       }));
-      _joins.forEach((j) {
+      for (var j in _joins) {
         var c = compiledJoins[j] = j.compile(trampoline);
         //if (c != null) {
         if (c != '') {
@@ -310,7 +311,7 @@ abstract class Query<T, Where extends QueryWhere> extends QueryBase<T> {
             f.add('NULL');
           }
         }
-      });
+      }
     }
     if (withFields) b.write(f.join(', '));
     fromQuery ??= tableName;
@@ -354,7 +355,7 @@ abstract class Query<T, Where extends QueryWhere> extends QueryBase<T> {
     if (_joins.isEmpty) {
       return executor
           .query(tableName, sql, substitutionValues,
-              fields.map(adornWithTableName).toList())
+              returningFields: fields.map(adornWithTableName).toList())
           .then((it) => deserializeList(it));
     } else {
       return executor.transaction((tx) async {
@@ -379,14 +380,23 @@ abstract class Query<T, Where extends QueryWhere> extends QueryBase<T> {
     if (insertion == '') {
       throw StateError('No values have been specified for update.');
     } else {
-      // TODO: How to do this in a non-Postgres DB?
-      var returning = fields.map(adornWithTableName).join(', ');
       var sql = compile({});
-      sql = 'WITH $tableName as ($insertion RETURNING $returning) ' + sql;
+      var returningSql = sql;
+      if (executor.dialect is PostgreSQLDialect) {
+        var returning = fields.map(adornWithTableName).join(', ');
+        sql = 'WITH $tableName as ($insertion RETURNING $returning) ' + sql;
+      } else if (executor.dialect is MySQLDialect) {
+        sql = '$insertion';
+      } else {
+        _log.fine("Unsupported database dialect.");
+      }
 
-      //_log.fine("Insert Query = $sql");
+      _log.fine("Insert Query = $sql");
 
-      return executor.query(tableName, sql, substitutionValues).then((it) {
+      return executor
+          .query(tableName, sql, substitutionValues,
+              returningQuery: returningSql)
+          .then((it) {
         // Return SQL execution results
         return it.isEmpty ? Optional.empty() : deserialize(it.first);
       });

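Note: the insert path above now picks the SQL shape per dialect: PostgreSQL keeps the writable-CTE trick so a single statement both inserts and returns the row, while MySQL sends the bare INSERT and hands the original SELECT to the executor as returningQuery. A rough sketch of the two shapes, with illustrative table and column names only:

// Illustrative only: the shape of the SQL assembled by the insert path above.
void main() {
  var tableName = 'todos';
  var insertion =
      'INSERT INTO todos (text, is_complete) VALUES (@text, @isComplete)';
  var selectSql = 'SELECT todos.id, todos.text, todos.is_complete FROM todos';
  var returning = 'todos.id, todos.text, todos.is_complete';

  // PostgreSQL: one round trip via a writable CTE.
  var pgSql = 'WITH $tableName as ($insertion RETURNING $returning) $selectSql';

  // MySQL: the INSERT goes out unchanged; the executor later runs
  // the returning query with "where id = ?" bound to the new insertId.
  var mySql = insertion;
  var returningQuery = selectSql;

  print(pgSql);
  print('$mySql  (returningQuery: $returningQuery)');
}
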
@@ -45,11 +45,11 @@ abstract class QueryBase<T> {
   List<T> deserializeList(List<List<dynamic>> it) {
     var optResult = it.map(deserialize).toList();
     var result = <T>[];
-    optResult.forEach((element) {
+    for (var element in optResult) {
       element.ifPresent((item) {
         result.add(item);
       });
-    });
+    }
 
     return result;
   }

@@ -1,15 +1,23 @@
 import 'dart:async';
 
+import '../angel3_orm.dart';
+
 /// An abstract interface that performs queries.
 ///
 /// This class should be implemented.
 abstract class QueryExecutor {
   const QueryExecutor();
 
+  Dialect get dialect;
+
   /// Executes a single query.
   Future<List<List>> query(
-      String tableName, String query, Map<String, dynamic> substitutionValues,
-      [List<String> returningFields = const []]);
+    String tableName,
+    String query,
+    Map<String, dynamic> substitutionValues, {
+    String returningQuery = '',
+    List<String> returningFields = const [],
+  });
 
   /// Enters a database transaction, performing the actions within,
   /// and returning the results of [f].

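Note: QueryExecutor.query now takes returningQuery and returningFields as named parameters, and every executor must expose a Dialect. A minimal sketch of an implementation against the new interface (the no-op behaviour is for illustration only):

import 'dart:async';
import 'package:angel3_orm/angel3_orm.dart';

// Sketch: a stub executor showing the new query() signature and dialect getter.
class StubExecutor extends QueryExecutor {
  const StubExecutor();

  @override
  Dialect get dialect => const PostgreSQLDialect();

  @override
  Future<List<List>> query(
      String tableName, String query, Map<String, dynamic> substitutionValues,
      {String returningQuery = '',
      List<String> returningFields = const []}) async {
    // A real executor would send [query] to its database here.
    return [];
  }

  @override
  Future<T> transaction<T>(FutureOr<T> Function(QueryExecutor) f) async =>
      await f(this);
}
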
@@ -1,10 +1,10 @@
 import 'package:angel3_migration/angel3_migration.dart';
 import 'package:angel3_model/angel3_model.dart';
 import 'package:angel3_orm/angel3_orm.dart';
 import 'package:angel3_orm_mysql/angel3_orm_mysql.dart';
 import 'package:angel3_serialize/angel3_serialize.dart';
-//import 'package:galileo_sqljocky5/sqljocky.dart';
 import 'package:logging/logging.dart';
-import 'package:galileo_sqljocky5/sqljocky.dart';
+import 'package:mysql1/mysql1.dart';
 import 'package:optional/optional.dart';
 part 'main.g.dart';
+
@@ -15,9 +15,19 @@ void main() async {
     ..onRecord.listen(print);
 
   var settings = ConnectionSettings(
-      db: 'angel_orm_test', user: 'angel_orm_test', password: 'angel_orm_test');
+      host: 'localhost',
+      port: 3306,
+      db: 'orm_test',
+      user: 'Test',
+      password: 'Test123*');
   var connection = await MySqlConnection.connect(settings);
-  var logger = Logger('angel_orm_mysql');
+
+  var results = await connection.query('select name, is_complete from todos');
+  //await connection.close();
+
+  print("End");
+
+  var logger = Logger('orm_mysql');
   var executor = MySqlExecutor(connection, logger: logger);
 
   var query = TodoQuery();

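Note: a hypothetical continuation of the example above, showing the generated TodoQuery driving an insert through the new executor (the TodoQueryValues setters are assumed from generated code that is not part of this diff):

// Hypothetical continuation of main() above; relies on the imports and
// generated code already present in this example.
Future<void> insertTodo(QueryExecutor executor) async {
  var query = TodoQuery();
  query.values
    ..text = 'Buy milk'
    ..isComplete = false;

  // On MySQL this issues the INSERT, then re-reads the row through the
  // returningQuery/insertId round trip added by this commit.
  var inserted = await query.insert(executor);
  inserted.ifPresent((todo) => print('Inserted todo ${todo.id}'));
}
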
@@ -69,7 +69,7 @@ class TodoQuery extends Query<Todo, TodoQueryWhere> {
     if (row.every((x) => x == null)) return null;
     var model = Todo(
         id: row[0].toString(),
-        isComplete: (row[1] as bool?),
+        isComplete: (row[1] as int?) != 0,
         text: (row[2] as String?),
         createdAt: (row[3] as DateTime?),
         updatedAt: (row[4] as DateTime?));

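Note: mysql1 reports BOOLEAN (TINYINT(1)) columns as integers, so the generated deserializer can no longer cast row[1] straight to bool; the != 0 comparison above restores a boolean. The equivalent conversion in isolation:

// Sketch: MySQL hands back 0/1 for boolean columns. Note that a NULL
// column also maps to true here, mirroring the generated code above.
bool mySqlBool(Object? raw) => (raw as int?) != 0;
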
@@ -1,26 +1,33 @@
 import 'dart:async';
 import 'package:angel3_orm/angel3_orm.dart';
 import 'package:logging/logging.dart';
-// import 'package:pool/pool.dart';
-import 'package:galileo_sqljocky5/public/connection/connection.dart';
-import 'package:galileo_sqljocky5/sqljocky.dart';
+import 'package:mysql1/mysql1.dart';
 
 class MySqlExecutor extends QueryExecutor {
   /// An optional [Logger] to write to.
   final Logger? logger;
 
-  final Querier? _connection;
+  final MySqlConnection _connection;
 
   MySqlExecutor(this._connection, {this.logger});
 
+  final Dialect _dialect = const MySQLDialect();
+
+  @override
+  Dialect get dialect => _dialect;
+
   Future<void> close() {
+    return _connection.close();
+    /*
     if (_connection is MySqlConnection) {
       return (_connection as MySqlConnection).close();
     } else {
       return Future.value();
     }
+    */
   }
 
+  /*
   Future<Transaction> _startTransaction() {
     if (_connection is Transaction) {
       return Future.value(_connection as Transaction?);
@@ -69,7 +76,52 @@ class MySqlExecutor extends QueryExecutor {
       });
     }
   }
+  */
 
+  @override
+  Future<List<List>> query(
+      String tableName, String query, Map<String, dynamic> substitutionValues,
+      {String returningQuery = '',
+      List<String> returningFields = const []}) async {
+    // Change @id -> ?
+    for (var name in substitutionValues.keys) {
+      query = query.replaceAll('@$name', '?');
+    }
+
+    var params = substitutionValues.values.toList();
+
+    logger?.fine('Query: $query');
+    logger?.fine('Values: $params');
+    logger?.fine('Returning Query: $returningQuery');
+
+    if (returningQuery.isNotEmpty) {
+      // Handle insert, update and delete
+      // Retrieve back the inserted record
+      var result = await _connection.query(query, params);
+      query = '$returningQuery where id = ?';
+      params = [result.insertId];
+    }
+
+    // Handle select
+    return _connection.query(query, params).then((results) {
+      return results.map((r) => r.toList()).toList();
+    });
+  }
+
+  @override
+  Future<T> transaction<T>(FutureOr<T> Function(QueryExecutor) f) async {
+    return f(this);
+    /*
+    if (_connection is! MySqlConnection) {
+      return await f(this);
+    }
+
+    await _connection.transaction((context) async {
+      var executor = MySqlExecutor(context, logger: logger);
+    });
+    */
+  }
+  /*
   @override
   Future<T> transaction<T>(FutureOr<T> Function(QueryExecutor) f) async {
     if (_connection is Transaction) {
@@ -88,4 +140,5 @@ class MySqlExecutor extends QueryExecutor {
       rethrow;
     }
   }
+  */
 }

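Note: MySQL has no INSERT ... RETURNING, so MySqlExecutor runs the write first and then replays the caller-supplied returningQuery filtered by the auto-increment insertId. The round trip in isolation, with an illustrative table (mysql1 exposes the generated key as Results.insertId):

import 'package:mysql1/mysql1.dart';

// Sketch of the insert-then-select round trip performed by MySqlExecutor.query.
Future<List<List>> insertAndReturn(MySqlConnection connection) async {
  // 1. Run the write; mysql1 reports the generated key as insertId.
  var result = await connection.query(
      'INSERT INTO todos (text, is_complete) VALUES (?, ?)', ['Buy milk', 0]);

  // 2. Re-read the row using the returning query plus the new id.
  var rows = await connection.query(
      'SELECT id, text, is_complete FROM todos where id = ?',
      [result.insertId]);

  return rows.map((r) => r.toList()).toList();
}
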
@@ -8,8 +8,7 @@ environment:
 dependencies:
   angel3_orm: ^4.0.0
   logging: ^1.0.0
-  pool: ^1.5.0
-  galileo_sqljocky5: ^3.0.0
+  mysql1: ^0.19.0
   optional: ^6.0.0
 dev_dependencies:
   angel3_migration: ^4.0.0

@@ -10,7 +10,7 @@ void main() {
     if (rec.stackTrace != null) print(rec.stackTrace);
   });
 
-  group('postgresql', () {
+  group('mysql', () {
     group('belongsTo',
         () => belongsToTests(my(['author', 'book']), close: closeMy));
     group(

@@ -3,7 +3,7 @@ import 'dart:io';
 import 'package:angel3_orm/angel3_orm.dart';
 import 'package:angel3_orm_mysql/angel3_orm_mysql.dart';
 import 'package:logging/logging.dart';
-import 'package:galileo_sqljocky5/sqljocky.dart';
+import 'package:mysql1/mysql1.dart';
 
 FutureOr<QueryExecutor> Function() my(Iterable<String> schemas) {
   return () => connectToMySql(schemas);
@@ -14,15 +14,15 @@ Future<void> closeMy(QueryExecutor executor) =>
 
 Future<MySqlExecutor> connectToMySql(Iterable<String> schemas) async {
   var settings = ConnectionSettings(
-      db: 'angel_orm_test',
-      user: Platform.environment['MYSQL_USERNAME'] ?? 'angel_orm_test',
-      password: Platform.environment['MYSQL_PASSWORD'] ?? 'angel_orm_test');
+      db: 'orm_test',
+      host: "localhost",
+      user: Platform.environment['MYSQL_USERNAME'] ?? 'Test',
+      password: Platform.environment['MYSQL_PASSWORD'] ?? 'Test123*');
   var connection = await MySqlConnection.connect(settings);
-  var logger = Logger('angel_orm_mysql');
+  var logger = Logger('orm_mysql');
 
   for (var s in schemas) {
-    await connection
-        .execute(await File('test/migrations/$s.sql').readAsString());
+    await connection.query(await File('test/migrations/$s.sql').readAsString());
   }
 
   return MySqlExecutor(connection, logger: logger);

@@ -1,4 +1,4 @@
-CREATE TEMPORARY TABLE "authors" (
+CREATE TEMPORARY TABLE authors (
   id serial PRIMARY KEY,
   name varchar(255) UNIQUE NOT NULL,
   created_at timestamp,

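Note on the migration scripts below: MySQL only treats double-quoted names such as "authors" as identifiers when the ANSI_QUOTES SQL mode is enabled, so the quotes are dropped to keep the DDL portable; varchar also needs an explicit length on MySQL (hence common_name varchar(255)), and the inline references clauses are removed, presumably because they are unnecessary for these temporary test tables. The jsonb columns in the has_maps script are PostgreSQL-specific and are left untouched here.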
@@ -1,4 +1,4 @@
-CREATE TEMPORARY TABLE "books" (
+CREATE TEMPORARY TABLE books (
   id serial PRIMARY KEY,
   author_id int NOT NULL,
   partner_author_id int,

@@ -1,4 +1,4 @@
-CREATE TEMPORARY TABLE "cars" (
+CREATE TEMPORARY TABLE cars (
   id serial PRIMARY KEY,
   make varchar(255) NOT NULL,
   description TEXT NOT NULL,

@@ -1,4 +1,4 @@
-CREATE TEMPORARY TABLE "feet" (
+CREATE TEMPORARY TABLE feet (
   id serial PRIMARY KEY,
   leg_id int NOT NULL,
   n_toes int NOT NULL,

@@ -1,8 +1,8 @@
-CREATE TEMPORARY TABLE "fruits" (
-  "id" serial,
-  "tree_id" int,
-  "common_name" varchar,
-  "created_at" timestamp,
-  "updated_at" timestamp,
+CREATE TEMPORARY TABLE fruits (
+  id serial,
+  tree_id int,
+  common_name varchar(255),
+  created_at timestamp,
+  updated_at timestamp,
   PRIMARY KEY(id)
 );

@@ -1,4 +1,4 @@
-CREATE TEMPORARY TABLE "has_cars" (
+CREATE TEMPORARY TABLE has_cars (
   id serial PRIMARY KEY,
   type int not null,
   created_at timestamp,

@@ -1,4 +1,4 @@
-CREATE TEMPORARY TABLE "has_maps" (
+CREATE TEMPORARY TABLE has_maps (
   id serial PRIMARY KEY,
   value jsonb not null,
   list jsonb not null,

@@ -1,4 +1,4 @@
-CREATE TEMPORARY TABLE "legs" (
+CREATE TEMPORARY TABLE legs (
   id serial PRIMARY KEY,
   name varchar(255) NOT NULL,
   created_at timestamp,

@@ -1,6 +1,6 @@
-CREATE TEMPORARY TABLE "numbas" (
-  "i" int,
-  "parent" int references weird_joins(id),
+CREATE TEMPORARY TABLE numbas (
+  i int,
+  parent int,
   created_at TIMESTAMP,
   updated_at TIMESTAMP,
   PRIMARY KEY(i)

@@ -1,6 +1,6 @@
-CREATE TEMPORARY TABLE "roles" (
-  "id" serial PRIMARY KEY,
-  "name" varchar(255),
-  "created_at" timestamp,
-  "updated_at" timestamp
+CREATE TEMPORARY TABLE roles (
+  id serial PRIMARY KEY,
+  name varchar(255),
+  created_at timestamp,
+  updated_at timestamp
 );

@@ -1,7 +1,7 @@
-CREATE TEMPORARY TABLE "songs" (
-  "id" serial,
-  "weird_join_id" int references weird_joins(id),
-  "title" varchar(255),
+CREATE TEMPORARY TABLE songs (
+  id serial,
+  weird_join_id int,
+  title varchar(255),
   created_at TIMESTAMP,
   updated_at TIMESTAMP,
   PRIMARY KEY(id)

@@ -1,8 +1,7 @@
-CREATE TEMPORARY TABLE "trees" (
-  "id" serial,
-  "rings" smallint UNIQUE,
-  "created_at" timestamp,
-  "updated_at" timestamp,
-  UNIQUE(rings),
+CREATE TEMPORARY TABLE trees (
+  id serial,
+  rings smallint UNIQUE,
+  created_at timestamp,
+  updated_at timestamp,
   PRIMARY KEY(id)
 );

@@ -1,4 +1,4 @@
-CREATE TEMPORARY TABLE "unorthodoxes" (
-  "name" varchar(255),
+CREATE TEMPORARY TABLE unorthodoxes (
+  name varchar(255),
   PRIMARY KEY(name)
 );

@@ -1,8 +1,8 @@
-CREATE TEMPORARY TABLE "users" (
-  "id" serial PRIMARY KEY,
-  "username" varchar(255),
-  "password" varchar(255),
-  "email" varchar(255),
-  "created_at" timestamp,
-  "updated_at" timestamp
+CREATE TEMPORARY TABLE users (
+  id serial PRIMARY KEY,
+  username varchar(255),
+  password varchar(255),
+  email varchar(255),
+  created_at timestamp,
+  updated_at timestamp
 );

@@ -1,7 +1,7 @@
-CREATE TEMPORARY TABLE "role_users" (
-  "id" serial PRIMARY KEY,
-  "user_id" int NOT NULL,
-  "role_id" int NOT NULL,
-  "created_at" timestamp,
-  "updated_at" timestamp
+CREATE TEMPORARY TABLE role_users (
+  id serial PRIMARY KEY,
+  user_id int NOT NULL,
+  role_id int NOT NULL,
+  created_at timestamp,
+  updated_at timestamp
 );

@@ -1,13 +1,15 @@
-CREATE TEMPORARY TABLE "weird_joins" (
-  "id" serial,
-  "join_name" varchar(255) references unorthodoxes(name),
+CREATE TEMPORARY TABLE weird_joins (
+  id serial,
+  join_name varchar(255),
   PRIMARY KEY(id)
 );
-CREATE TEMPORARY TABLE "foos" (
-  "bar" varchar(255),
+
+CREATE TEMPORARY TABLE foos (
+  bar varchar(255) not null UNIQUE,
   PRIMARY KEY(bar)
 );
-CREATE TEMPORARY TABLE "foo_pivots" (
-  "weird_join_id" int references weird_joins(id),
-  "foo_bar" varchar(255) references foos(bar)
+
+CREATE TEMPORARY TABLE foo_pivots (
+  weird_join_id int,
+  foo_bar varchar(255)
 );

@@ -16,6 +16,11 @@ class PostgreSqlExecutor extends QueryExecutor {
     this.logger = logger ?? Logger('PostgreSqlExecutor');
   }
 
+  final Dialect _dialect = const PostgreSQLDialect();
+
+  @override
+  Dialect get dialect => _dialect;
+
   /// The underlying connection.
   PostgreSQLExecutionContext get connection => _connection;
 
@@ -31,8 +36,8 @@ class PostgreSqlExecutor extends QueryExecutor {
   @override
   Future<PostgreSQLResult> query(
       String tableName, String query, Map<String, dynamic> substitutionValues,
-      [List<String>? returningFields]) {
-    if (returningFields != null && returningFields.isNotEmpty) {
+      {String returningQuery = '', List<String> returningFields = const []}) {
+    if (returningFields.isNotEmpty) {
       var fields = returningFields.join(', ');
       var returning = 'RETURNING $fields';
       query = '$query $returning';
@@ -119,6 +124,11 @@ class PostgreSqlExecutorPool extends QueryExecutor {
     assert(size > 0, 'Connection pool cannot be empty.');
   }
 
+  final Dialect _dialect = const PostgreSQLDialect();
+
+  @override
+  Dialect get dialect => _dialect;
+
   /// Closes all connections.
   Future close() async {
     await _pool.close();
@@ -151,11 +161,11 @@ class PostgreSqlExecutorPool extends QueryExecutor {
   @override
   Future<PostgreSQLResult> query(
       String tableName, String query, Map<String, dynamic> substitutionValues,
-      [List<String>? returningFields]) {
+      {String returningQuery = '', List<String> returningFields = const []}) {
     return _pool.withResource(() async {
       var executor = await _next();
-      return executor.query(
-          tableName, query, substitutionValues, returningFields);
+      return executor.query(tableName, query, substitutionValues,
+          returningFields: returningFields);
     });
   }
 

@@ -17,6 +17,11 @@ class PostgreSqlPoolExecutor extends QueryExecutor {
     this.logger = logger ?? Logger('PostgreSqlPoolExecutor');
   }
 
+  final Dialect _dialect = const PostgreSQLDialect();
+
+  @override
+  Dialect get dialect => _dialect;
+
   /// The underlying connection pooling.
   PgPool get pool => _pool;
 
@@ -29,7 +34,7 @@ class PostgreSqlPoolExecutor extends QueryExecutor {
   @override
   Future<PostgreSQLResult> query(
       String tableName, String query, Map<String, dynamic> substitutionValues,
-      [List<String> returningFields = const []]) {
+      {String returningQuery = '', List<String> returningFields = const []}) {
     if (returningFields.isNotEmpty) {
       var fields = returningFields.join(', ');
       var returning = 'RETURNING $fields';