gen@2.4.4

parent 0adde73854
commit e5d9a0851e

18 changed files with 206 additions and 206 deletions
@@ -84,7 +84,8 @@ class Serializable {
const Serializable(
{this.serializers: const [Serializers.map, Serializers.json],
this.autoSnakeCaseNames: true,
@deprecated this.autoIdAndDateFields: true,
// ignore: deprecated_member_use_from_same_package
@deprecated this.autoIdAndDateFields = true,
this.includeAnnotations: const []});

/// A list of enabled serialization modes.
@@ -1,3 +1,6 @@
# 2.4.4
* Remove unnecessary `new` and `const`.

# 2.4.3
* Generate `Codec` and `Converter` classes.
* Generate `toString` methods.
@@ -1,3 +1,8 @@
include: package:pedantic/analysis_options.yaml
analyzer:
  strong-mode:
    implicit-casts: false
    implicit-casts: false
linter:
  rules:
    - unnecessary_new
    - unnecessary_const
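The two lints enabled above drive the rest of this commit: since Dart 2, `new` is always optional and a nested `const` is implied by the surrounding const context, so both keywords can simply be dropped. A minimal sketch of what `unnecessary_new` and `unnecessary_const` flag (the `Point` class is hypothetical, not part of this repository):

```dart
class Point {
  final int x, y;
  const Point(this.x, this.y);
}

// Flagged by the lints: redundant keywords.
final a = new Point(1, 2); // unnecessary_new
const origin = const [const Point(0, 0)]; // unnecessary_const

// Preferred form, as applied throughout this commit.
final b = Point(1, 2);
const cleaned = [Point(0, 0)];
```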
@@ -17,7 +17,7 @@ class Todo extends _Todo {
final bool completed;

Todo copyWith({String text, bool completed}) {
return new Todo(
return Todo(
text: text ?? this.text, completed: completed ?? this.completed);
}

@@ -44,7 +44,7 @@ class Todo extends _Todo {
// SerializerGenerator
// **************************************************************************

const TodoSerializer todoSerializer = const TodoSerializer();
const TodoSerializer todoSerializer = TodoSerializer();

class TodoEncoder extends Converter<Todo, Map> {
const TodoEncoder();
@@ -68,7 +68,7 @@ class TodoSerializer extends Codec<Todo, Map> {
@override
get decoder => const TodoDecoder();
static Todo fromMap(Map map) {
return new Todo(
return Todo(
text: map['text'] as String, completed: map['completed'] as bool);
}

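For orientation, the `Todo` hunks above come from generated code (a `.g.dart` part file). A rough sketch of the kind of hand-written model angel_serialize generates it from follows; the file name and member style are assumptions, since the source model itself is not part of this diff:

```dart
import 'package:angel_serialize/angel_serialize.dart';
part 'main.g.dart';

@Serializable()
abstract class _Todo {
  String text; // surfaces as `final String text` on the generated Todo
  bool completed; // surfaces as `final bool completed`
}
```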
@@ -11,7 +11,6 @@ import 'package:angel_serialize/angel_serialize.dart';
import 'package:build/build.dart';
import 'package:code_buffer/code_buffer.dart';
import 'package:code_builder/code_builder.dart';
import 'package:dart_style/dart_style.dart';
import 'package:path/path.dart' as p;
import 'package:recase/recase.dart';
import 'package:source_gen/source_gen.dart' hide LibraryBuilder;
@@ -26,22 +25,21 @@ part 'serialize.dart';
part 'typescript.dart';

Builder jsonModelBuilder(_) {
return new SharedPartBuilder(
const [const JsonModelGenerator()], 'angel_serialize');
return SharedPartBuilder(const [JsonModelGenerator()], 'angel_serialize');
}

Builder serializerBuilder(_) {
return new SharedPartBuilder(
const [const SerializerGenerator()], 'angel_serialize_serializer');
return SharedPartBuilder(
const [SerializerGenerator()], 'angel_serialize_serializer');
}

Builder typescriptDefinitionBuilder(_) {
return const TypeScriptDefinitionBuilder();
return TypeScriptDefinitionBuilder();
}

/// Converts a [DartType] to a [TypeReference].
TypeReference convertTypeReference(DartType t) {
return new TypeReference((b) {
return TypeReference((b) {
b..symbol = t.name;

if (t is InterfaceType) {
@@ -94,9 +92,7 @@ String dartObjectToString(DartObject v) {
'}';
}
if (v.toStringValue() != null) {
return literalString(v.toStringValue())
.accept(new DartEmitter())
.toString();
return literalString(v.toStringValue()).accept(DartEmitter()).toString();
}
if (type is InterfaceType && type.element.isEnum) {
// Find the index of the enum, then find the member.
@@ -110,7 +106,7 @@ String dartObjectToString(DartObject v) {
}
}

throw new ArgumentError(v.toString());
throw ArgumentError(v.toString());
}

/// Determines if a type supports `package:angel_serialize`.
@ -13,28 +13,28 @@ import 'package:source_gen/source_gen.dart';
|
|||
import 'context.dart';
|
||||
|
||||
// ignore: deprecated_member_use
|
||||
const TypeChecker aliasTypeChecker = const TypeChecker.fromRuntime(Alias);
|
||||
const TypeChecker aliasTypeChecker = TypeChecker.fromRuntime(Alias);
|
||||
|
||||
const TypeChecker dateTimeTypeChecker = const TypeChecker.fromRuntime(DateTime);
|
||||
const TypeChecker dateTimeTypeChecker = TypeChecker.fromRuntime(DateTime);
|
||||
|
||||
// ignore: deprecated_member_use
|
||||
const TypeChecker excludeTypeChecker = const TypeChecker.fromRuntime(Exclude);
|
||||
const TypeChecker excludeTypeChecker = TypeChecker.fromRuntime(Exclude);
|
||||
|
||||
const TypeChecker serializableFieldTypeChecker =
|
||||
const TypeChecker.fromRuntime(SerializableField);
|
||||
TypeChecker.fromRuntime(SerializableField);
|
||||
|
||||
const TypeChecker serializableTypeChecker =
|
||||
const TypeChecker.fromRuntime(Serializable);
|
||||
TypeChecker.fromRuntime(Serializable);
|
||||
|
||||
const TypeChecker generatedSerializableTypeChecker =
|
||||
const TypeChecker.fromRuntime(GeneratedSerializable);
|
||||
TypeChecker.fromRuntime(GeneratedSerializable);
|
||||
|
||||
final Map<String, BuildContext> _cache = {};
|
||||
|
||||
/// Create a [BuildContext].
|
||||
Future<BuildContext> buildContext(ClassElement clazz, ConstantReader annotation,
|
||||
BuildStep buildStep, Resolver resolver, bool autoSnakeCaseNames,
|
||||
{bool heedExclude: true}) async {
|
||||
{bool heedExclude = true}) async {
|
||||
var id = clazz.location.components.join('-');
|
||||
if (_cache.containsKey(id)) {
|
||||
return _cache[id];
|
||||
|
@ -44,7 +44,7 @@ Future<BuildContext> buildContext(ClassElement clazz, ConstantReader annotation,
|
|||
autoSnakeCaseNames =
|
||||
annotation.peek('autoSnakeCaseNames')?.boolValue ?? autoSnakeCaseNames;
|
||||
|
||||
var ctx = new BuildContext(
|
||||
var ctx = BuildContext(
|
||||
annotation,
|
||||
clazz,
|
||||
originalClassName: clazz.name,
|
||||
|
@ -80,7 +80,7 @@ Future<BuildContext> buildContext(ClassElement clazz, ConstantReader annotation,
|
|||
if (sField.alias != null) {
|
||||
ctx.aliases[field.name] = sField.alias;
|
||||
} else if (autoSnakeCaseNames != false) {
|
||||
ctx.aliases[field.name] = new ReCase(field.name).snakeCase;
|
||||
ctx.aliases[field.name] = ReCase(field.name).snakeCase;
|
||||
}
|
||||
|
||||
if (sField.isNullable == false) {
|
||||
|
@ -91,7 +91,7 @@ Future<BuildContext> buildContext(ClassElement clazz, ConstantReader annotation,
|
|||
|
||||
if (sField.exclude) {
|
||||
// ignore: deprecated_member_use
|
||||
ctx.excluded[field.name] = new Exclude(
|
||||
ctx.excluded[field.name] = Exclude(
|
||||
canSerialize: sField.canSerialize,
|
||||
canDeserialize: sField.canDeserialize,
|
||||
);
|
||||
|
@ -123,11 +123,11 @@ Future<BuildContext> buildContext(ClassElement clazz, ConstantReader annotation,
|
|||
var excludeAnnotation = excludeTypeChecker.firstAnnotationOf(el);
|
||||
|
||||
if (excludeAnnotation != null) {
|
||||
var cr = new ConstantReader(excludeAnnotation);
|
||||
var cr = ConstantReader(excludeAnnotation);
|
||||
foundNone = false;
|
||||
|
||||
// ignore: deprecated_member_use
|
||||
ctx.excluded[field.name] = new Exclude(
|
||||
ctx.excluded[field.name] = Exclude(
|
||||
canSerialize: cr.read('canSerialize').boolValue,
|
||||
canDeserialize: cr.read('canDeserialize').boolValue,
|
||||
);
|
||||
|
@ -138,7 +138,7 @@ Future<BuildContext> buildContext(ClassElement clazz, ConstantReader annotation,
|
|||
// ignore: deprecated_member_use
|
||||
const TypeChecker.fromRuntime(DefaultValue).firstAnnotationOf(el);
|
||||
if (defAnn != null) {
|
||||
var rev = new ConstantReader(defAnn).revive().positionalArguments[0];
|
||||
var rev = ConstantReader(defAnn).revive().positionalArguments[0];
|
||||
ctx.defaults[field.name] = rev;
|
||||
foundNone = false;
|
||||
}
|
||||
|
@ -150,14 +150,14 @@ Future<BuildContext> buildContext(ClassElement clazz, ConstantReader annotation,
|
|||
|
||||
if (aliasAnn != null) {
|
||||
// ignore: deprecated_member_use
|
||||
alias = new Alias(aliasAnn.getField('name').toStringValue());
|
||||
alias = Alias(aliasAnn.getField('name').toStringValue());
|
||||
foundNone = false;
|
||||
}
|
||||
|
||||
if (alias?.name?.isNotEmpty == true) {
|
||||
ctx.aliases[field.name] = alias.name;
|
||||
} else if (autoSnakeCaseNames != false) {
|
||||
ctx.aliases[field.name] = new ReCase(field.name).snakeCase;
|
||||
ctx.aliases[field.name] = ReCase(field.name).snakeCase;
|
||||
}
|
||||
|
||||
// Check for @required
|
||||
|
@ -167,7 +167,7 @@ Future<BuildContext> buildContext(ClassElement clazz, ConstantReader annotation,
|
|||
if (required != null) {
|
||||
log.warning(
|
||||
'Using @required on fields (like ${clazz.name}.${field.name}) is now deprecated; use @SerializableField(isNullable: false) instead.');
|
||||
var cr = new ConstantReader(required);
|
||||
var cr = ConstantReader(required);
|
||||
var reason = cr.peek('reason')?.stringValue ??
|
||||
"Missing required field '${ctx.resolveFieldName(field.name)}' on ${ctx.modelClassName}.";
|
||||
ctx.requiredFields[field.name] = reason;
|
||||
|
@ -198,8 +198,7 @@ Future<BuildContext> buildContext(ClassElement clazz, ConstantReader annotation,
|
|||
|
||||
if (const TypeChecker.fromRuntime(Model).isAssignableFromType(clazz.type)) {
|
||||
if (!fieldNames.contains('id')) {
|
||||
var idField =
|
||||
new ShimFieldImpl('id', lib.context.typeProvider.stringType);
|
||||
var idField = ShimFieldImpl('id', lib.context.typeProvider.stringType);
|
||||
ctx.fields.insert(0, idField);
|
||||
ctx.shimmed['id'] = true;
|
||||
}
|
||||
|
@ -214,8 +213,8 @@ Future<BuildContext> buildContext(ClassElement clazz, ConstantReader annotation,
|
|||
dateTime = dt.type;
|
||||
}
|
||||
|
||||
var field = new ShimFieldImpl(key, dateTime);
|
||||
ctx.aliases[key] = new ReCase(key).snakeCase;
|
||||
var field = ShimFieldImpl(key, dateTime);
|
||||
ctx.aliases[key] = ReCase(key).snakeCase;
|
||||
ctx.fields.add(field);
|
||||
ctx.shimmed[key] = true;
|
||||
}
|
||||
|
|
|
@ -54,7 +54,7 @@ class BuildContext {
|
|||
this.sourceFilename,
|
||||
this.autoSnakeCaseNames,
|
||||
this.autoIdAndDateFields,
|
||||
this.includeAnnotations: const <DartObject>[]});
|
||||
this.includeAnnotations = const <DartObject>[]});
|
||||
|
||||
/// The name of the generated class.
|
||||
String get modelClassName => originalClassName.startsWith('_')
|
||||
|
@ -63,10 +63,10 @@ class BuildContext {
|
|||
|
||||
/// A [ReCase] instance reflecting on the [modelClassName].
|
||||
ReCase get modelClassNameRecase =>
|
||||
_modelClassNameRecase ??= new ReCase(modelClassName);
|
||||
_modelClassNameRecase ??= ReCase(modelClassName);
|
||||
|
||||
TypeReference get modelClassType =>
|
||||
_modelClassType ??= new TypeReference((b) => b.symbol = modelClassName);
|
||||
_modelClassType ??= TypeReference((b) => b.symbol = modelClassName);
|
||||
|
||||
/// The [FieldElement] pointing to the primary key.
|
||||
FieldElement get primaryKeyField =>
|
||||
|
|
|
@ -12,18 +12,18 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
var ctx = await buildContext(element as ClassElement, annotation, buildStep,
|
||||
await buildStep.resolver, true);
|
||||
|
||||
var lib = new Library((b) {
|
||||
var lib = Library((b) {
|
||||
generateClass(ctx, b, annotation);
|
||||
});
|
||||
|
||||
var buf = lib.accept(new DartEmitter());
|
||||
var buf = lib.accept(DartEmitter());
|
||||
return buf.toString();
|
||||
}
|
||||
|
||||
/// Generate an extended model class.
|
||||
void generateClass(
|
||||
BuildContext ctx, LibraryBuilder file, ConstantReader annotation) {
|
||||
file.body.add(new Class((clazz) {
|
||||
file.body.add(Class((clazz) {
|
||||
clazz
|
||||
..name = ctx.modelClassNameRecase.pascalCase
|
||||
..annotations.add(refer('generatedSerializable'));
|
||||
|
@ -33,20 +33,20 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
}
|
||||
|
||||
if (shouldBeConstant(ctx)) {
|
||||
clazz.implements.add(new Reference(ctx.originalClassName));
|
||||
clazz.implements.add(Reference(ctx.originalClassName));
|
||||
} else {
|
||||
clazz.extend = new Reference(ctx.originalClassName);
|
||||
clazz.extend = Reference(ctx.originalClassName);
|
||||
}
|
||||
|
||||
//if (ctx.importsPackageMeta)
|
||||
// clazz.annotations.add(new CodeExpression(new Code('immutable')));
|
||||
// clazz.annotations.add(CodeExpression(Code('immutable')));
|
||||
|
||||
for (var field in ctx.fields) {
|
||||
clazz.fields.add(new Field((b) {
|
||||
clazz.fields.add(Field((b) {
|
||||
b
|
||||
..name = field.name
|
||||
..modifier = FieldModifier.final$
|
||||
..annotations.add(new CodeExpression(new Code('override')))
|
||||
..annotations.add(CodeExpression(Code('override')))
|
||||
..type = convertTypeReference(field.type);
|
||||
|
||||
for (var el in [field.getter, field]) {
|
||||
|
@ -67,11 +67,11 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
var serializers = annotation.peek('serializers')?.listValue ?? [];
|
||||
|
||||
if (serializers.any((o) => o.toIntValue() == Serializers.json)) {
|
||||
clazz.methods.add(new Method((method) {
|
||||
clazz.methods.add(Method((method) {
|
||||
method
|
||||
..name = 'toJson'
|
||||
..returns = new Reference('Map<String, dynamic>')
|
||||
..body = new Code('return ${clazz.name}Serializer.toMap(this);');
|
||||
..returns = Reference('Map<String, dynamic>')
|
||||
..body = Code('return ${clazz.name}Serializer.toMap(this);');
|
||||
}));
|
||||
}
|
||||
}));
|
||||
|
@ -87,13 +87,13 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
/// Generate a constructor with named parameters.
|
||||
void generateConstructor(
|
||||
BuildContext ctx, ClassBuilder clazz, LibraryBuilder file) {
|
||||
clazz.constructors.add(new Constructor((constructor) {
|
||||
clazz.constructors.add(Constructor((constructor) {
|
||||
// Add all `super` params
|
||||
constructor.constant = ctx.clazz.unnamedConstructor?.isConst == true ||
|
||||
shouldBeConstant(ctx);
|
||||
|
||||
for (var param in ctx.constructorParameters) {
|
||||
constructor.requiredParameters.add(new Parameter((b) => b
|
||||
constructor.requiredParameters.add(Parameter((b) => b
|
||||
..name = param.name
|
||||
..type = convertTypeReference(param.type)));
|
||||
}
|
||||
|
@ -111,14 +111,14 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
defaultValue = dartObjectToString(existingDefault);
|
||||
}
|
||||
|
||||
constructor.initializers.add(new Code('''
|
||||
constructor.initializers.add(Code('''
|
||||
this.${field.name} =
|
||||
new $typeName.unmodifiable(${field.name} ?? $defaultValue)'''));
|
||||
$typeName.unmodifiable(${field.name} ?? $defaultValue)'''));
|
||||
}
|
||||
}
|
||||
|
||||
for (var field in ctx.fields) {
|
||||
constructor.optionalParameters.add(new Parameter((b) {
|
||||
constructor.optionalParameters.add(Parameter((b) {
|
||||
b
|
||||
..toThis = shouldBeConstant(ctx)
|
||||
..name = field.name
|
||||
|
@ -127,7 +127,7 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
var existingDefault = ctx.defaults[field.name];
|
||||
|
||||
if (existingDefault != null) {
|
||||
b.defaultTo = new Code(dartObjectToString(existingDefault));
|
||||
b.defaultTo = Code(dartObjectToString(existingDefault));
|
||||
}
|
||||
|
||||
if (!isListOrMapType(field.type))
|
||||
|
@ -138,7 +138,7 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
|
||||
if (ctx.requiredFields.containsKey(field.name) &&
|
||||
b.defaultTo == null) {
|
||||
b.annotations.add(new CodeExpression(new Code('required')));
|
||||
b.annotations.add(CodeExpression(Code('required')));
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
@ -146,7 +146,7 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
if (ctx.constructorParameters.isNotEmpty) {
|
||||
if (!shouldBeConstant(ctx) ||
|
||||
ctx.clazz.unnamedConstructor?.isConst == true)
|
||||
constructor.initializers.add(new Code(
|
||||
constructor.initializers.add(Code(
|
||||
'super(${ctx.constructorParameters.map((p) => p.name).join(',')})'));
|
||||
}
|
||||
}));
|
||||
|
@ -155,7 +155,7 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
/// Generate a `copyWith` method.
|
||||
void generateCopyWithMethod(
|
||||
BuildContext ctx, ClassBuilder clazz, LibraryBuilder file) {
|
||||
clazz.methods.add(new Method((method) {
|
||||
clazz.methods.add(Method((method) {
|
||||
method
|
||||
..name = 'copyWith'
|
||||
..returns = ctx.modelClassType;
|
||||
|
@ -163,13 +163,13 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
// Add all `super` params
|
||||
if (ctx.constructorParameters.isNotEmpty) {
|
||||
for (var param in ctx.constructorParameters) {
|
||||
method.requiredParameters.add(new Parameter((b) => b
|
||||
method.requiredParameters.add(Parameter((b) => b
|
||||
..name = param.name
|
||||
..type = convertTypeReference(param.type)));
|
||||
}
|
||||
}
|
||||
|
||||
var buf = new StringBuffer('return new ${ctx.modelClassName}(');
|
||||
var buf = StringBuffer('return ${ctx.modelClassName}(');
|
||||
int i = 0;
|
||||
|
||||
for (var param in ctx.constructorParameters) {
|
||||
|
@ -179,7 +179,7 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
|
||||
// Add named parameters
|
||||
for (var field in ctx.fields) {
|
||||
method.optionalParameters.add(new Parameter((b) {
|
||||
method.optionalParameters.add(Parameter((b) {
|
||||
b
|
||||
..name = field.name
|
||||
..named = true
|
||||
|
@ -191,7 +191,7 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
}
|
||||
|
||||
buf.write(');');
|
||||
method.body = new Code(buf.toString());
|
||||
method.body = Code(buf.toString());
|
||||
}));
|
||||
}
|
||||
|
||||
|
@ -200,22 +200,22 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
if (const TypeChecker.fromRuntime(List).isAssignableFromType(type)) {
|
||||
if (type.typeParameters.length == 1) {
|
||||
var eq = generateEquality(type.typeArguments[0]);
|
||||
return 'const ListEquality<${type.typeArguments[0].name}>($eq)';
|
||||
return 'ListEquality<${type.typeArguments[0].name}>($eq)';
|
||||
} else
|
||||
return 'const ListEquality()';
|
||||
return 'ListEquality()';
|
||||
} else if (const TypeChecker.fromRuntime(Map)
|
||||
.isAssignableFromType(type)) {
|
||||
if (type.typeParameters.length == 2) {
|
||||
var keq = generateEquality(type.typeArguments[0]),
|
||||
veq = generateEquality(type.typeArguments[1]);
|
||||
return 'const MapEquality<${type.typeArguments[0].name}, ${type.typeArguments[1].name}>(keys: $keq, values: $veq)';
|
||||
return 'MapEquality<${type.typeArguments[0].name}, ${type.typeArguments[1].name}>(keys: $keq, values: $veq)';
|
||||
} else
|
||||
return 'const MapEquality()';
|
||||
return 'MapEquality()';
|
||||
}
|
||||
|
||||
return nullable ? null : 'const DefaultEquality<${type.name}>()';
|
||||
return nullable ? null : 'DefaultEquality<${type.name}>()';
|
||||
} else {
|
||||
return 'const DefaultEquality()';
|
||||
return 'DefaultEquality()';
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -229,7 +229,7 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
|
||||
void generateHashCode(BuildContext ctx, ClassBuilder clazz) {
|
||||
clazz
|
||||
..methods.add(new Method((method) {
|
||||
..methods.add(Method((method) {
|
||||
method
|
||||
..name = 'hashCode'
|
||||
..type = MethodType.getter
|
||||
|
@ -263,11 +263,11 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
|
||||
void generateEqualsOperator(
|
||||
BuildContext ctx, ClassBuilder clazz, LibraryBuilder file) {
|
||||
clazz.methods.add(new Method((method) {
|
||||
clazz.methods.add(Method((method) {
|
||||
method
|
||||
..name = 'operator =='
|
||||
..returns = new Reference('bool')
|
||||
..requiredParameters.add(new Parameter((b) => b.name = 'other'));
|
||||
..returns = Reference('bool')
|
||||
..requiredParameters.add(Parameter((b) => b.name = 'other'));
|
||||
|
||||
var buf = ['other is ${ctx.originalClassName}'];
|
||||
|
||||
|
@ -275,7 +275,7 @@ class JsonModelGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
return generateComparator(f.type)('other.${f.name}', f.name);
|
||||
}));
|
||||
|
||||
method.body = new Code('return ${buf.join('&&')};');
|
||||
method.body = Code('return ${buf.join('&&')};');
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@ part of angel_serialize_generator;
|
|||
class SerializerGenerator extends GeneratorForAnnotation<Serializable> {
|
||||
final bool autoSnakeCaseNames;
|
||||
|
||||
const SerializerGenerator({this.autoSnakeCaseNames: true});
|
||||
const SerializerGenerator({this.autoSnakeCaseNames = true});
|
||||
|
||||
@override
|
||||
Future<String> generateForAnnotatedElement(
|
||||
|
@ -23,12 +23,12 @@ class SerializerGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
return null;
|
||||
}
|
||||
|
||||
var lib = new Library((b) {
|
||||
var lib = Library((b) {
|
||||
generateClass(serializers.map((s) => s.toIntValue()).toList(), ctx, b);
|
||||
generateFieldsClass(ctx, b);
|
||||
});
|
||||
|
||||
var buf = lib.accept(new DartEmitter());
|
||||
var buf = lib.accept(DartEmitter());
|
||||
return buf.toString();
|
||||
}
|
||||
|
||||
|
@ -40,8 +40,8 @@ class SerializerGenerator extends GeneratorForAnnotation<Serializable> {
|
|||
camel = ctx.modelClassNameRecase.camelCase;
|
||||
|
||||
if (ctx.constructorParameters.isEmpty) {
|
||||
file.body.add(new Code('''
|
||||
const ${pascal}Serializer ${camel}Serializer = const ${pascal}Serializer();
|
||||
file.body.add(Code('''
|
||||
const ${pascal}Serializer ${camel}Serializer = ${pascal}Serializer();
|
||||
|
||||
class ${pascal}Encoder extends Converter<${pascal}, Map> {
|
||||
const ${pascal}Encoder();
|
||||
|
@ -59,7 +59,7 @@ class ${pascal}Decoder extends Converter<Map, ${pascal}> {
|
|||
'''));
|
||||
}
|
||||
|
||||
file.body.add(new Class((clazz) {
|
||||
file.body.add(Class((clazz) {
|
||||
clazz..name = '${pascal}Serializer';
|
||||
if (ctx.constructorParameters.isEmpty) {
|
||||
clazz
|
||||
|
@ -96,24 +96,24 @@ class ${pascal}Decoder extends Converter<Map, ${pascal}> {
|
|||
|
||||
void generateToMapMethod(
|
||||
ClassBuilder clazz, BuildContext ctx, LibraryBuilder file) {
|
||||
clazz.methods.add(new Method((method) {
|
||||
clazz.methods.add(Method((method) {
|
||||
method
|
||||
..static = true
|
||||
..name = 'toMap'
|
||||
..returns = new Reference('Map<String, dynamic>')
|
||||
..requiredParameters.add(new Parameter((b) {
|
||||
..returns = Reference('Map<String, dynamic>')
|
||||
..requiredParameters.add(Parameter((b) {
|
||||
b
|
||||
..name = 'model'
|
||||
..type = refer(ctx.originalClassName);
|
||||
}));
|
||||
|
||||
var buf = new StringBuffer();
|
||||
var buf = StringBuffer();
|
||||
|
||||
ctx.requiredFields.forEach((key, msg) {
|
||||
if (ctx.excluded[key]?.canSerialize == false) return;
|
||||
buf.writeln('''
|
||||
if (model.$key == null) {
|
||||
throw new FormatException("$msg");
|
||||
throw FormatException("$msg");
|
||||
}
|
||||
''');
|
||||
});
|
||||
|
@ -152,21 +152,21 @@ class ${pascal}Decoder extends Converter<Map, ${pascal}> {
|
|||
|
||||
// Serialize model classes via `XSerializer.toMap`
|
||||
else if (isModelClass(type)) {
|
||||
var rc = new ReCase(type.name);
|
||||
var rc = ReCase(type.name);
|
||||
serializedRepresentation =
|
||||
'${serializerToMap(rc, 'model.${field.name}')}';
|
||||
} else if (type is InterfaceType) {
|
||||
if (isListOfModelType(type)) {
|
||||
var name = type.typeArguments[0].name;
|
||||
if (name.startsWith('_')) name = name.substring(1);
|
||||
var rc = new ReCase(name);
|
||||
var rc = ReCase(name);
|
||||
var m = serializerToMap(rc, 'm');
|
||||
serializedRepresentation = '''
|
||||
model.${field.name}
|
||||
?.map((m) => $m)
|
||||
?.toList()''';
|
||||
} else if (isMapToModelType(type)) {
|
||||
var rc = new ReCase(type.typeArguments[1].name);
|
||||
var rc = ReCase(type.typeArguments[1].name);
|
||||
serializedRepresentation =
|
||||
'''model.${field.name}.keys?.fold({}, (map, key) {
|
||||
return map..[key] =
|
||||
|
@ -192,48 +192,48 @@ class ${pascal}Decoder extends Converter<Map, ${pascal}> {
|
|||
}
|
||||
|
||||
buf.write('};');
|
||||
method.body = new Block.of([
|
||||
new Code('if (model == null) { return null; }'),
|
||||
new Code(buf.toString()),
|
||||
method.body = Block.of([
|
||||
Code('if (model == null) { return null; }'),
|
||||
Code(buf.toString()),
|
||||
]);
|
||||
}));
|
||||
}
|
||||
|
||||
void generateFromMapMethod(
|
||||
ClassBuilder clazz, BuildContext ctx, LibraryBuilder file) {
|
||||
clazz.methods.add(new Method((method) {
|
||||
clazz.methods.add(Method((method) {
|
||||
method
|
||||
..static = true
|
||||
..name = 'fromMap'
|
||||
..returns = ctx.modelClassType
|
||||
..requiredParameters.add(
|
||||
new Parameter((b) => b
|
||||
Parameter((b) => b
|
||||
..name = 'map'
|
||||
..type = new Reference('Map')),
|
||||
..type = Reference('Map')),
|
||||
);
|
||||
|
||||
// Add all `super` params
|
||||
if (ctx.constructorParameters.isNotEmpty) {
|
||||
for (var param in ctx.constructorParameters) {
|
||||
method.requiredParameters.add(new Parameter((b) => b
|
||||
method.requiredParameters.add(Parameter((b) => b
|
||||
..name = param.name
|
||||
..type = convertTypeReference(param.type)));
|
||||
}
|
||||
}
|
||||
|
||||
var buf = new StringBuffer();
|
||||
var buf = StringBuffer();
|
||||
|
||||
ctx.requiredFields.forEach((key, msg) {
|
||||
if (ctx.excluded[key]?.canDeserialize == false) return;
|
||||
var name = ctx.resolveFieldName(key);
|
||||
buf.writeln('''
|
||||
if (map['$name'] == null) {
|
||||
throw new FormatException("$msg");
|
||||
throw FormatException("$msg");
|
||||
}
|
||||
''');
|
||||
});
|
||||
|
||||
buf.writeln('return new ${ctx.modelClassName}(');
|
||||
buf.writeln('return ${ctx.modelClassName}(');
|
||||
int i = 0;
|
||||
|
||||
for (var param in ctx.constructorParameters) {
|
||||
|
@ -273,24 +273,24 @@ class ${pascal}Decoder extends Converter<Map, ${pascal}> {
|
|||
|
||||
// Serialize model classes via `XSerializer.toMap`
|
||||
else if (isModelClass(type)) {
|
||||
var rc = new ReCase(type.name);
|
||||
var rc = ReCase(type.name);
|
||||
deserializedRepresentation = "map['$alias'] != null"
|
||||
" ? ${rc.pascalCase}Serializer.fromMap(map['$alias'] as Map)"
|
||||
" : $defaultValue";
|
||||
} else if (type is InterfaceType) {
|
||||
if (isListOfModelType(type)) {
|
||||
var rc = new ReCase(type.typeArguments[0].name);
|
||||
var rc = ReCase(type.typeArguments[0].name);
|
||||
deserializedRepresentation = "map['$alias'] is Iterable"
|
||||
" ? new List.unmodifiable(((map['$alias'] as Iterable)"
|
||||
" ? List.unmodifiable(((map['$alias'] as Iterable)"
|
||||
".where((x) => x is Map))"
|
||||
".cast<Map>()"
|
||||
".map(${rc.pascalCase}Serializer.fromMap))"
|
||||
" : $defaultValue";
|
||||
} else if (isMapToModelType(type)) {
|
||||
var rc = new ReCase(type.typeArguments[1].name);
|
||||
var rc = ReCase(type.typeArguments[1].name);
|
||||
deserializedRepresentation = '''
|
||||
map['$alias'] is Map
|
||||
? new Map.unmodifiable((map['$alias'] as Map).keys.fold({}, (out, key) {
|
||||
? Map.unmodifiable((map['$alias'] as Map).keys.fold({}, (out, key) {
|
||||
return out..[key] = ${rc.pascalCase}Serializer
|
||||
.fromMap(((map['$alias'] as Map)[key]) as Map);
|
||||
}))
|
||||
|
@ -311,7 +311,7 @@ class ${pascal}Decoder extends Converter<Map, ${pascal}> {
|
|||
.isAssignableFromType(type) &&
|
||||
type.typeArguments.length == 1) {
|
||||
var arg = convertTypeReference(type.typeArguments[0])
|
||||
.accept(new DartEmitter());
|
||||
.accept(DartEmitter());
|
||||
deserializedRepresentation = '''
|
||||
map['$alias'] is Iterable
|
||||
? (map['$alias'] as Iterable).cast<$arg>().toList()
|
||||
|
@ -321,9 +321,9 @@ class ${pascal}Decoder extends Converter<Map, ${pascal}> {
|
|||
.isAssignableFromType(type) &&
|
||||
type.typeArguments.length == 2) {
|
||||
var key = convertTypeReference(type.typeArguments[0])
|
||||
.accept(new DartEmitter());
|
||||
.accept(DartEmitter());
|
||||
var value = convertTypeReference(type.typeArguments[1])
|
||||
.accept(new DartEmitter());
|
||||
.accept(DartEmitter());
|
||||
deserializedRepresentation = '''
|
||||
map['$alias'] is Map
|
||||
? (map['$alias'] as Map).cast<$key, $value>()
|
||||
|
@ -337,11 +337,11 @@ class ${pascal}Decoder extends Converter<Map, ${pascal}> {
|
|||
:
|
||||
(
|
||||
map['$alias'] is Iterable<int>
|
||||
? new Uint8List.fromList((map['$alias'] as Iterable<int>).toList())
|
||||
? Uint8List.fromList((map['$alias'] as Iterable<int>).toList())
|
||||
:
|
||||
(
|
||||
map['$alias'] is String
|
||||
? new Uint8List.fromList(base64.decode(map['$alias'] as String))
|
||||
? Uint8List.fromList(base64.decode(map['$alias'] as String))
|
||||
: $defaultValue
|
||||
)
|
||||
)
|
||||
|
@ -353,21 +353,21 @@ class ${pascal}Decoder extends Converter<Map, ${pascal}> {
|
|||
}
|
||||
|
||||
buf.write(');');
|
||||
method.body = new Code(buf.toString());
|
||||
method.body = Code(buf.toString());
|
||||
}));
|
||||
}
|
||||
|
||||
void generateFieldsClass(BuildContext ctx, LibraryBuilder file) {
|
||||
file.body.add(new Class((clazz) {
|
||||
file.body.add(Class((clazz) {
|
||||
clazz
|
||||
..abstract = true
|
||||
..name = '${ctx.modelClassNameRecase.pascalCase}Fields';
|
||||
|
||||
clazz.fields.add(new Field((b) {
|
||||
clazz.fields.add(Field((b) {
|
||||
b
|
||||
..static = true
|
||||
..modifier = FieldModifier.constant
|
||||
..type = new TypeReference((b) => b
|
||||
..type = TypeReference((b) => b
|
||||
..symbol = 'List'
|
||||
..types.add(refer('String')))
|
||||
..name = 'allFields'
|
||||
|
@ -378,13 +378,13 @@ class ${pascal}Decoder extends Converter<Map, ${pascal}> {
|
|||
}));
|
||||
|
||||
for (var field in ctx.fields) {
|
||||
clazz.fields.add(new Field((b) {
|
||||
clazz.fields.add(Field((b) {
|
||||
b
|
||||
..static = true
|
||||
..modifier = FieldModifier.constant
|
||||
..type = new Reference('String')
|
||||
..type = Reference('String')
|
||||
..name = field.name
|
||||
..assignment = new Code("'${ctx.resolveFieldName(field.name)}'");
|
||||
..assignment = Code("'${ctx.resolveFieldName(field.name)}'");
|
||||
}));
|
||||
}
|
||||
}));
|
||||
|
|
|
@ -3,7 +3,7 @@ part of angel_serialize_generator;
|
|||
class TypeScriptDefinitionBuilder implements Builder {
|
||||
final bool autoSnakeCaseNames;
|
||||
|
||||
const TypeScriptDefinitionBuilder({this.autoSnakeCaseNames: true});
|
||||
const TypeScriptDefinitionBuilder({this.autoSnakeCaseNames = true});
|
||||
|
||||
@override
|
||||
Map<String, List<String>> get buildExtensions {
|
||||
|
@ -29,7 +29,7 @@ class TypeScriptDefinitionBuilder implements Builder {
|
|||
};
|
||||
|
||||
types.forEach((t, tsType) {
|
||||
if (new TypeChecker.fromRuntime(t).isAssignableFromType(type))
|
||||
if (TypeChecker.fromRuntime(t).isAssignableFromType(type))
|
||||
typeScriptType = tsType;
|
||||
});
|
||||
|
||||
|
@ -48,7 +48,7 @@ class TypeScriptDefinitionBuilder implements Builder {
|
|||
//var modelType = type.typeArguments[1];
|
||||
/*var innerCtx = await buildContext(
|
||||
modelType.element,
|
||||
new ConstantReader(
|
||||
ConstantReader(
|
||||
serializableTypeChecker.firstAnnotationOf(modelType.element)),
|
||||
buildStep,
|
||||
buildStep.resolver,
|
||||
|
@ -56,10 +56,10 @@ class TypeScriptDefinitionBuilder implements Builder {
|
|||
true,
|
||||
);*/
|
||||
|
||||
typeScriptType = ctx.modelClassNameRecase.pascalCase +
|
||||
new ReCase(fieldName).pascalCase;
|
||||
typeScriptType =
|
||||
ctx.modelClassNameRecase.pascalCase + ReCase(fieldName).pascalCase;
|
||||
|
||||
ext.add(new CodeBuffer()
|
||||
ext.add(CodeBuffer()
|
||||
..writeln('interface $typeScriptType {')
|
||||
..indent()
|
||||
..writeln('[key: $key]: $value;')
|
||||
|
@ -67,7 +67,7 @@ class TypeScriptDefinitionBuilder implements Builder {
|
|||
..writeln('}'));
|
||||
} else if (const TypeChecker.fromRuntime(List)
|
||||
.isAssignableFromType(type)) {
|
||||
if (type.typeArguments.length == 0)
|
||||
if (type.typeArguments.isEmpty)
|
||||
typeScriptType = 'any[]';
|
||||
else {
|
||||
var arg = await compileToTypeScriptType(
|
||||
|
@ -104,7 +104,7 @@ class TypeScriptDefinitionBuilder implements Builder {
|
|||
|
||||
var ctx = await buildContext(
|
||||
type.element,
|
||||
new ConstantReader(
|
||||
ConstantReader(
|
||||
serializableTypeChecker.firstAnnotationOf(type.element)),
|
||||
buildStep,
|
||||
buildStep.resolver,
|
||||
|
@ -123,7 +123,7 @@ class TypeScriptDefinitionBuilder implements Builder {
|
|||
LibraryReader lib;
|
||||
|
||||
try {
|
||||
lib = new LibraryReader(await buildStep.inputLibrary);
|
||||
lib = LibraryReader(await buildStep.inputLibrary);
|
||||
} catch (_) {
|
||||
return;
|
||||
}
|
||||
|
@ -164,7 +164,7 @@ class TypeScriptDefinitionBuilder implements Builder {
|
|||
if (contexts.isEmpty) return;
|
||||
|
||||
var refs = <String>[];
|
||||
var buf = new CodeBuffer(
|
||||
var buf = CodeBuffer(
|
||||
trailingNewline: true,
|
||||
sourceUrl: buildStep.inputId.uri,
|
||||
);
|
||||
|
@ -209,11 +209,11 @@ class TypeScriptDefinitionBuilder implements Builder {
|
|||
buf
|
||||
..outdent()
|
||||
..writeln('}');
|
||||
var finalBuf = new CodeBuffer();
|
||||
var finalBuf = CodeBuffer();
|
||||
refs.forEach(finalBuf.writeln);
|
||||
buf.copyInto(finalBuf);
|
||||
|
||||
buildStep.writeAsString(
|
||||
await buildStep.writeAsString(
|
||||
buildStep.inputId.changeExtension('.d.ts'),
|
||||
finalBuf.toString(),
|
||||
);
|
||||
|
|
|
@@ -1,5 +1,5 @@
name: angel_serialize_generator
version: 2.4.3
version: 2.4.4
description: Model serialization generators, designed for use with Angel. Combine with angel_serialize for flexible modeling.
author: Tobe O <thosakwe@gmail.com>
homepage: https://github.com/angel-dart/serialize
@@ -21,6 +21,7 @@ dependencies:
dev_dependencies:
  build_runner: ^1.0.0
  collection: ^1.0.0
  pedantic: ^1.0.0
  test: ^1.0.0
# dependency_overrides:
#   angel_serialize:
|
@ -4,18 +4,18 @@ import 'models/book.dart';
|
|||
const String deathlyHallowsIsbn = '0-545-01022-5';
|
||||
|
||||
main() {
|
||||
var deathlyHallows = new Book(
|
||||
var deathlyHallows = Book(
|
||||
id: '0',
|
||||
author: 'J.K. Rowling',
|
||||
title: 'Harry Potter and the Deathly Hallows',
|
||||
description: 'The 7th book.',
|
||||
pageCount: 759,
|
||||
notModels: [1.0, 3.0],
|
||||
updatedAt: new DateTime.now());
|
||||
updatedAt: DateTime.now());
|
||||
var serializedDeathlyHallows = deathlyHallows.toJson();
|
||||
print('Deathly Hallows: $deathlyHallows');
|
||||
|
||||
var jkRowling = new Author(
|
||||
var jkRowling = Author(
|
||||
id: '1',
|
||||
name: 'J.K. Rowling',
|
||||
age: 51,
|
||||
|
@ -25,7 +25,7 @@ main() {
|
|||
var deathlyHallowsMap = bookSerializer.encode(deathlyHallows);
|
||||
print('J.K. Rowling: $jkRowling');
|
||||
|
||||
var library = new Library(collection: {deathlyHallowsIsbn: deathlyHallows});
|
||||
var library = Library(collection: {deathlyHallowsIsbn: deathlyHallows});
|
||||
var serializedLibrary = LibrarySerializer.toMap(library);
|
||||
print('Library: $library');
|
||||
|
||||
|
@ -55,7 +55,7 @@ main() {
|
|||
});
|
||||
|
||||
test('heeds canDeserialize', () {
|
||||
var map = new Map.from(serializedJkRowling)..['obscured'] = 'foo';
|
||||
var map = Map.from(serializedJkRowling)..['obscured'] = 'foo';
|
||||
var author = authorSerializer.decode(map);
|
||||
expect(author.obscured, 'foo');
|
||||
});
|
||||
|
@ -99,7 +99,7 @@ main() {
|
|||
});
|
||||
|
||||
test('required fields toMap', () {
|
||||
var author = new Author(name: null, age: 24);
|
||||
var author = Author(name: null, age: 24);
|
||||
expect(() => author.toJson(), throwsFormatException);
|
||||
});
|
||||
|
||||
|
|
|
@ -3,17 +3,17 @@ import 'dart:typed_data';
|
|||
import 'package:test/test.dart';
|
||||
import 'models/with_enum.dart';
|
||||
|
||||
const WithEnum aWithEnum = const WithEnum(type: WithEnumType.a);
|
||||
const WithEnum aWithEnum2 = const WithEnum(type: WithEnumType.a);
|
||||
const WithEnum aWithEnum = WithEnum(type: WithEnumType.a);
|
||||
const WithEnum aWithEnum2 = WithEnum(type: WithEnumType.a);
|
||||
|
||||
void main() {
|
||||
test('enum serializes to int', () {
|
||||
var w = new WithEnum(type: WithEnumType.b).toJson();
|
||||
var w = WithEnum(type: WithEnumType.b).toJson();
|
||||
expect(w[WithEnumFields.type], WithEnumType.values.indexOf(WithEnumType.b));
|
||||
});
|
||||
|
||||
test('enum serializes null if null', () {
|
||||
var w = new WithEnum(type: null).toJson();
|
||||
var w = WithEnum(type: null).toJson();
|
||||
expect(w[WithEnumFields.type], null);
|
||||
});
|
||||
|
||||
|
@ -38,10 +38,9 @@ void main() {
|
|||
});
|
||||
|
||||
test('equality', () {
|
||||
expect(WithEnum(type: WithEnumType.a), WithEnum(type: WithEnumType.a));
|
||||
expect(
|
||||
new WithEnum(type: WithEnumType.a), new WithEnum(type: WithEnumType.a));
|
||||
expect(new WithEnum(type: WithEnumType.a),
|
||||
isNot(new WithEnum(type: WithEnumType.b)));
|
||||
WithEnum(type: WithEnumType.a), isNot(WithEnum(type: WithEnumType.b)));
|
||||
});
|
||||
|
||||
test('const', () {
|
||||
|
@ -49,9 +48,8 @@ void main() {
|
|||
});
|
||||
|
||||
test('uint8list', () {
|
||||
var ee = new WithEnum(
|
||||
imageBytes:
|
||||
new Uint8List.fromList(new List<int>.generate(1000, (i) => i)));
|
||||
var ee = WithEnum(
|
||||
imageBytes: Uint8List.fromList(List<int>.generate(1000, (i) => i)));
|
||||
var eeMap = ee.toJson();
|
||||
print(ee);
|
||||
var ef = WithEnumSerializer.fromMap(eeMap);
|
||||
|
|
|
@ -20,7 +20,7 @@ class Book extends _Book {
|
|||
this.camelCaseString,
|
||||
this.createdAt,
|
||||
this.updatedAt})
|
||||
: this.notModels = new List.unmodifiable(notModels ?? []);
|
||||
: this.notModels = List.unmodifiable(notModels ?? []);
|
||||
|
||||
@override
|
||||
final String id;
|
||||
|
@ -60,7 +60,7 @@ class Book extends _Book {
|
|||
String camelCaseString,
|
||||
DateTime createdAt,
|
||||
DateTime updatedAt}) {
|
||||
return new Book(
|
||||
return Book(
|
||||
id: id ?? this.id,
|
||||
author: author ?? this.author,
|
||||
title: title ?? this.title,
|
||||
|
@ -79,7 +79,7 @@ class Book extends _Book {
|
|||
other.title == title &&
|
||||
other.description == description &&
|
||||
other.pageCount == pageCount &&
|
||||
const ListEquality<double>(const DefaultEquality<double>())
|
||||
ListEquality<double>(DefaultEquality<double>())
|
||||
.equals(other.notModels, notModels) &&
|
||||
other.camelCaseString == camelCaseString &&
|
||||
other.createdAt == createdAt &&
|
||||
|
@ -123,7 +123,7 @@ class Author extends _Author {
|
|||
this.obscured,
|
||||
this.createdAt,
|
||||
this.updatedAt})
|
||||
: this.books = new List.unmodifiable(books ?? []);
|
||||
: this.books = List.unmodifiable(books ?? []);
|
||||
|
||||
@override
|
||||
final String id;
|
||||
|
@ -163,7 +163,7 @@ class Author extends _Author {
|
|||
String obscured,
|
||||
DateTime createdAt,
|
||||
DateTime updatedAt}) {
|
||||
return new Author(
|
||||
return Author(
|
||||
id: id ?? this.id,
|
||||
name: name ?? this.name,
|
||||
age: age ?? this.age,
|
||||
|
@ -180,7 +180,7 @@ class Author extends _Author {
|
|||
other.id == id &&
|
||||
other.name == name &&
|
||||
other.age == age &&
|
||||
const ListEquality<_Book>(const DefaultEquality<_Book>())
|
||||
ListEquality<_Book>(DefaultEquality<_Book>())
|
||||
.equals(other.books, books) &&
|
||||
other.newestBook == newestBook &&
|
||||
other.secret == secret &&
|
||||
|
@ -218,7 +218,7 @@ class Author extends _Author {
|
|||
class Library extends _Library {
|
||||
Library(
|
||||
{this.id, Map<String, _Book> collection, this.createdAt, this.updatedAt})
|
||||
: this.collection = new Map.unmodifiable(collection ?? {});
|
||||
: this.collection = Map.unmodifiable(collection ?? {});
|
||||
|
||||
@override
|
||||
final String id;
|
||||
|
@ -237,7 +237,7 @@ class Library extends _Library {
|
|||
Map<String, _Book> collection,
|
||||
DateTime createdAt,
|
||||
DateTime updatedAt}) {
|
||||
return new Library(
|
||||
return Library(
|
||||
id: id ?? this.id,
|
||||
collection: collection ?? this.collection,
|
||||
createdAt: createdAt ?? this.createdAt,
|
||||
|
@ -247,9 +247,9 @@ class Library extends _Library {
|
|||
bool operator ==(other) {
|
||||
return other is _Library &&
|
||||
other.id == id &&
|
||||
const MapEquality<String, _Book>(
|
||||
keys: const DefaultEquality<String>(),
|
||||
values: const DefaultEquality<_Book>())
|
||||
MapEquality<String, _Book>(
|
||||
keys: DefaultEquality<String>(),
|
||||
values: DefaultEquality<_Book>())
|
||||
.equals(other.collection, collection) &&
|
||||
other.createdAt == createdAt &&
|
||||
other.updatedAt == updatedAt;
|
||||
|
@ -279,7 +279,7 @@ class Bookmark extends _Bookmark {
|
|||
this.comment,
|
||||
this.createdAt,
|
||||
this.updatedAt})
|
||||
: this.history = new List.unmodifiable(history ?? []),
|
||||
: this.history = List.unmodifiable(history ?? []),
|
||||
super(book);
|
||||
|
||||
@override
|
||||
|
@ -307,7 +307,7 @@ class Bookmark extends _Bookmark {
|
|||
String comment,
|
||||
DateTime createdAt,
|
||||
DateTime updatedAt}) {
|
||||
return new Bookmark(book,
|
||||
return Bookmark(book,
|
||||
id: id ?? this.id,
|
||||
history: history ?? this.history,
|
||||
page: page ?? this.page,
|
||||
|
@ -319,7 +319,7 @@ class Bookmark extends _Bookmark {
|
|||
bool operator ==(other) {
|
||||
return other is _Bookmark &&
|
||||
other.id == id &&
|
||||
const ListEquality<int>(const DefaultEquality<int>())
|
||||
ListEquality<int>(DefaultEquality<int>())
|
||||
.equals(other.history, history) &&
|
||||
other.page == page &&
|
||||
other.comment == comment &&
|
||||
|
@ -346,7 +346,7 @@ class Bookmark extends _Bookmark {
|
|||
// SerializerGenerator
|
||||
// **************************************************************************
|
||||
|
||||
const BookSerializer bookSerializer = const BookSerializer();
|
||||
const BookSerializer bookSerializer = BookSerializer();
|
||||
|
||||
class BookEncoder extends Converter<Book, Map> {
|
||||
const BookEncoder();
|
||||
|
@ -370,7 +370,7 @@ class BookSerializer extends Codec<Book, Map> {
|
|||
@override
|
||||
get decoder => const BookDecoder();
|
||||
static Book fromMap(Map map) {
|
||||
return new Book(
|
||||
return Book(
|
||||
id: map['id'] as String,
|
||||
author: map['author'] as String,
|
||||
title: map['title'] as String,
|
||||
|
@ -442,7 +442,7 @@ abstract class BookFields {
|
|||
static const String updatedAt = 'updated_at';
|
||||
}
|
||||
|
||||
const AuthorSerializer authorSerializer = const AuthorSerializer();
|
||||
const AuthorSerializer authorSerializer = AuthorSerializer();
|
||||
|
||||
class AuthorEncoder extends Converter<Author, Map> {
|
||||
const AuthorEncoder();
|
||||
|
@ -467,19 +467,19 @@ class AuthorSerializer extends Codec<Author, Map> {
|
|||
get decoder => const AuthorDecoder();
|
||||
static Author fromMap(Map map) {
|
||||
if (map['name'] == null) {
|
||||
throw new FormatException("Missing required field 'name' on Author.");
|
||||
throw FormatException("Missing required field 'name' on Author.");
|
||||
}
|
||||
|
||||
if (map['age'] == null) {
|
||||
throw new FormatException("Custom message for missing `age`");
|
||||
throw FormatException("Custom message for missing `age`");
|
||||
}
|
||||
|
||||
return new Author(
|
||||
return Author(
|
||||
id: map['id'] as String,
|
||||
name: map['name'] as String,
|
||||
age: map['age'] as int,
|
||||
books: map['books'] is Iterable
|
||||
? new List.unmodifiable(
|
||||
? List.unmodifiable(
|
||||
((map['books'] as Iterable).where((x) => x is Map))
|
||||
.cast<Map>()
|
||||
.map(BookSerializer.fromMap))
|
||||
|
@ -505,11 +505,11 @@ class AuthorSerializer extends Codec<Author, Map> {
|
|||
return null;
|
||||
}
|
||||
if (model.name == null) {
|
||||
throw new FormatException("Missing required field 'name' on Author.");
|
||||
throw FormatException("Missing required field 'name' on Author.");
|
||||
}
|
||||
|
||||
if (model.age == null) {
|
||||
throw new FormatException("Custom message for missing `age`");
|
||||
throw FormatException("Custom message for missing `age`");
|
||||
}
|
||||
|
||||
return {
|
||||
|
@ -556,7 +556,7 @@ abstract class AuthorFields {
|
|||
static const String updatedAt = 'updated_at';
|
||||
}
|
||||
|
||||
const LibrarySerializer librarySerializer = const LibrarySerializer();
|
||||
const LibrarySerializer librarySerializer = LibrarySerializer();
|
||||
|
||||
class LibraryEncoder extends Converter<Library, Map> {
|
||||
const LibraryEncoder();
|
||||
|
@ -580,10 +580,10 @@ class LibrarySerializer extends Codec<Library, Map> {
|
|||
@override
|
||||
get decoder => const LibraryDecoder();
|
||||
static Library fromMap(Map map) {
|
||||
return new Library(
|
||||
return Library(
|
||||
id: map['id'] as String,
|
||||
collection: map['collection'] is Map
|
||||
? new Map.unmodifiable(
|
||||
? Map.unmodifiable(
|
||||
(map['collection'] as Map).keys.fold({}, (out, key) {
|
||||
return out
|
||||
..[key] = BookSerializer.fromMap(
|
||||
|
@ -637,10 +637,10 @@ abstract class LibraryFields {
|
|||
abstract class BookmarkSerializer {
|
||||
static Bookmark fromMap(Map map, _Book book) {
|
||||
if (map['page'] == null) {
|
||||
throw new FormatException("Missing required field 'page' on Bookmark.");
|
||||
throw FormatException("Missing required field 'page' on Bookmark.");
|
||||
}
|
||||
|
||||
return new Bookmark(book,
|
||||
return Bookmark(book,
|
||||
id: map['id'] as String,
|
||||
history: map['history'] is Iterable
|
||||
? (map['history'] as Iterable).cast<int>().toList()
|
||||
|
@ -664,7 +664,7 @@ abstract class BookmarkSerializer {
|
|||
return null;
|
||||
}
|
||||
if (model.page == null) {
|
||||
throw new FormatException("Missing required field 'page' on Bookmark.");
|
||||
throw FormatException("Missing required field 'page' on Bookmark.");
|
||||
}
|
||||
|
||||
return {
|
||||
|
|
|
@ -17,7 +17,7 @@ class GamepadButton implements _GamepadButton {
|
|||
final int radius;
|
||||
|
||||
GamepadButton copyWith({String name, int radius}) {
|
||||
return new GamepadButton(
|
||||
return GamepadButton(
|
||||
name: name ?? this.name, radius: radius ?? this.radius);
|
||||
}
|
||||
|
||||
|
@ -45,8 +45,8 @@ class GamepadButton implements _GamepadButton {
|
|||
@generatedSerializable
|
||||
class Gamepad extends _Gamepad {
|
||||
Gamepad({List<_GamepadButton> buttons, Map<String, dynamic> dynamicMap})
|
||||
: this.buttons = new List.unmodifiable(buttons ?? []),
|
||||
this.dynamicMap = new Map.unmodifiable(dynamicMap ?? {});
|
||||
: this.buttons = List.unmodifiable(buttons ?? []),
|
||||
this.dynamicMap = Map.unmodifiable(dynamicMap ?? {});
|
||||
|
||||
@override
|
||||
final List<_GamepadButton> buttons;
|
||||
|
@ -56,19 +56,17 @@ class Gamepad extends _Gamepad {
|
|||
|
||||
Gamepad copyWith(
|
||||
{List<_GamepadButton> buttons, Map<String, dynamic> dynamicMap}) {
|
||||
return new Gamepad(
|
||||
return Gamepad(
|
||||
buttons: buttons ?? this.buttons,
|
||||
dynamicMap: dynamicMap ?? this.dynamicMap);
|
||||
}
|
||||
|
||||
bool operator ==(other) {
|
||||
return other is _Gamepad &&
|
||||
const ListEquality<_GamepadButton>(
|
||||
const DefaultEquality<_GamepadButton>())
|
||||
ListEquality<_GamepadButton>(DefaultEquality<_GamepadButton>())
|
||||
.equals(other.buttons, buttons) &&
|
||||
const MapEquality<String, dynamic>(
|
||||
keys: const DefaultEquality<String>(),
|
||||
values: const DefaultEquality())
|
||||
MapEquality<String, dynamic>(
|
||||
keys: DefaultEquality<String>(), values: DefaultEquality())
|
||||
.equals(other.dynamicMap, dynamicMap);
|
||||
}
|
||||
|
||||
|
@ -92,7 +90,7 @@ class Gamepad extends _Gamepad {
|
|||
// **************************************************************************
|
||||
|
||||
const GamepadButtonSerializer gamepadButtonSerializer =
|
||||
const GamepadButtonSerializer();
|
||||
GamepadButtonSerializer();
|
||||
|
||||
class GamepadButtonEncoder extends Converter<GamepadButton, Map> {
|
||||
const GamepadButtonEncoder();
|
||||
|
@ -116,7 +114,7 @@ class GamepadButtonSerializer extends Codec<GamepadButton, Map> {
|
|||
@override
|
||||
get decoder => const GamepadButtonDecoder();
|
||||
static GamepadButton fromMap(Map map) {
|
||||
return new GamepadButton(
|
||||
return GamepadButton(
|
||||
name: map['name'] as String, radius: map['radius'] as int);
|
||||
}
|
||||
|
||||
|
@ -136,7 +134,7 @@ abstract class GamepadButtonFields {
|
|||
static const String radius = 'radius';
|
||||
}
|
||||
|
||||
const GamepadSerializer gamepadSerializer = const GamepadSerializer();
|
||||
const GamepadSerializer gamepadSerializer = GamepadSerializer();
|
||||
|
||||
class GamepadEncoder extends Converter<Gamepad, Map> {
|
||||
const GamepadEncoder();
|
||||
|
@ -160,9 +158,9 @@ class GamepadSerializer extends Codec<Gamepad, Map> {
|
|||
@override
|
||||
get decoder => const GamepadDecoder();
|
||||
static Gamepad fromMap(Map map) {
|
||||
return new Gamepad(
|
||||
return Gamepad(
|
||||
buttons: map['buttons'] is Iterable
|
||||
? new List.unmodifiable(
|
||||
? List.unmodifiable(
|
||||
((map['buttons'] as Iterable).where((x) => x is Map))
|
||||
.cast<Map>()
|
||||
.map(GamepadButtonSerializer.fromMap))
|
||||
|
|
|
@ -17,14 +17,13 @@ class Goat implements _Goat {
|
|||
final List<int> list;
|
||||
|
||||
Goat copyWith({int integer, List<int> list}) {
|
||||
return new Goat(integer: integer ?? this.integer, list: list ?? this.list);
|
||||
return Goat(integer: integer ?? this.integer, list: list ?? this.list);
|
||||
}
|
||||
|
||||
bool operator ==(other) {
|
||||
return other is _Goat &&
|
||||
other.integer == integer &&
|
||||
const ListEquality<int>(const DefaultEquality<int>())
|
||||
.equals(other.list, list);
|
||||
ListEquality<int>(DefaultEquality<int>()).equals(other.list, list);
|
||||
}
|
||||
|
||||
@override
|
||||
|
@ -46,7 +45,7 @@ class Goat implements _Goat {
|
|||
// SerializerGenerator
|
||||
// **************************************************************************
|
||||
|
||||
const GoatSerializer goatSerializer = const GoatSerializer();
|
||||
const GoatSerializer goatSerializer = GoatSerializer();
|
||||
|
||||
class GoatEncoder extends Converter<Goat, Map> {
|
||||
const GoatEncoder();
|
||||
|
@ -70,7 +69,7 @@ class GoatSerializer extends Codec<Goat, Map> {
|
|||
@override
|
||||
get decoder => const GoatDecoder();
|
||||
static Goat fromMap(Map map) {
|
||||
return new Goat(
|
||||
return Goat(
|
||||
integer: map['integer'] as int ?? 34,
|
||||
list: map['list'] is Iterable
|
||||
? (map['list'] as Iterable).cast<int>().toList()
|
||||
|
|
|
@ -14,13 +14,13 @@ class HasMap implements _HasMap {
|
|||
final Map<dynamic, dynamic> value;
|
||||
|
||||
HasMap copyWith({Map<dynamic, dynamic> value}) {
|
||||
return new HasMap(value: value ?? this.value);
|
||||
return HasMap(value: value ?? this.value);
|
||||
}
|
||||
|
||||
bool operator ==(other) {
|
||||
return other is _HasMap &&
|
||||
const MapEquality<dynamic, dynamic>(
|
||||
keys: const DefaultEquality(), values: const DefaultEquality())
|
||||
MapEquality<dynamic, dynamic>(
|
||||
keys: DefaultEquality(), values: DefaultEquality())
|
||||
.equals(other.value, value);
|
||||
}
|
||||
|
||||
|
@ -43,7 +43,7 @@ class HasMap implements _HasMap {
|
|||
// SerializerGenerator
|
||||
// **************************************************************************
|
||||
|
||||
const HasMapSerializer hasMapSerializer = const HasMapSerializer();
|
||||
const HasMapSerializer hasMapSerializer = HasMapSerializer();
|
||||
|
||||
class HasMapEncoder extends Converter<HasMap, Map> {
|
||||
const HasMapEncoder();
|
||||
|
@ -68,10 +68,10 @@ class HasMapSerializer extends Codec<HasMap, Map> {
|
|||
get decoder => const HasMapDecoder();
|
||||
static HasMap fromMap(Map map) {
|
||||
if (map['value'] == null) {
|
||||
throw new FormatException("Missing required field 'value' on HasMap.");
|
||||
throw FormatException("Missing required field 'value' on HasMap.");
|
||||
}
|
||||
|
||||
return new HasMap(value: _fromString(map['value']));
|
||||
return HasMap(value: _fromString(map['value']));
|
||||
}
|
||||
|
||||
static Map<String, dynamic> toMap(_HasMap model) {
|
||||
|
@ -79,7 +79,7 @@ class HasMapSerializer extends Codec<HasMap, Map> {
|
|||
return null;
|
||||
}
|
||||
if (model.value == null) {
|
||||
throw new FormatException("Missing required field 'value' on HasMap.");
|
||||
throw FormatException("Missing required field 'value' on HasMap.");
|
||||
}
|
||||
|
||||
return {'value': _toString(model.value)};
|
||||
|
|
|
@ -22,7 +22,7 @@ class WithEnum implements _WithEnum {
|
|||
|
||||
WithEnum copyWith(
|
||||
{WithEnumType type, List<int> finalList, Uint8List imageBytes}) {
|
||||
return new WithEnum(
|
||||
return WithEnum(
|
||||
type: type ?? this.type,
|
||||
finalList: finalList ?? this.finalList,
|
||||
imageBytes: imageBytes ?? this.imageBytes);
|
||||
|
@ -31,9 +31,9 @@ class WithEnum implements _WithEnum {
|
|||
bool operator ==(other) {
|
||||
return other is _WithEnum &&
|
||||
other.type == type &&
|
||||
const ListEquality<int>(const DefaultEquality<int>())
|
||||
ListEquality<int>(DefaultEquality<int>())
|
||||
.equals(other.finalList, finalList) &&
|
||||
const ListEquality().equals(other.imageBytes, imageBytes);
|
||||
ListEquality().equals(other.imageBytes, imageBytes);
|
||||
}
|
||||
|
||||
@override
|
||||
|
@ -55,7 +55,7 @@ class WithEnum implements _WithEnum {
|
|||
// SerializerGenerator
|
||||
// **************************************************************************
|
||||
|
||||
const WithEnumSerializer withEnumSerializer = const WithEnumSerializer();
|
||||
const WithEnumSerializer withEnumSerializer = WithEnumSerializer();
|
||||
|
||||
class WithEnumEncoder extends Converter<WithEnum, Map> {
|
||||
const WithEnumEncoder();
|
||||
|
@ -79,7 +79,7 @@ class WithEnumSerializer extends Codec<WithEnum, Map> {
|
|||
@override
|
||||
get decoder => const WithEnumDecoder();
|
||||
static WithEnum fromMap(Map map) {
|
||||
return new WithEnum(
|
||||
return WithEnum(
|
||||
type: map['type'] is WithEnumType
|
||||
? (map['type'] as WithEnumType)
|
||||
: (map['type'] is int
|
||||
|
@ -91,10 +91,10 @@ class WithEnumSerializer extends Codec<WithEnum, Map> {
|
|||
imageBytes: map['image_bytes'] is Uint8List
|
||||
? (map['image_bytes'] as Uint8List)
|
||||
: (map['image_bytes'] is Iterable<int>
|
||||
? new Uint8List.fromList(
|
||||
? Uint8List.fromList(
|
||||
(map['image_bytes'] as Iterable<int>).toList())
|
||||
: (map['image_bytes'] is String
|
||||
? new Uint8List.fromList(
|
||||
? Uint8List.fromList(
|
||||
base64.decode(map['image_bytes'] as String))
|
||||
: null)));
|
||||
}
|
||||
|
|