/** Serialization.
 *
 * Test: dmd -version=show -preview=dip1000 -preview=in -vcolumns -d -I.. -i -debug -g -checkaction=context -allinst -unittest -main -run serialization.d
 * Test: ldmd2 -d -fsanitize=address -I.. -i -debug -g -checkaction=context -allinst -unittest -main -run serialization.d
 * Debug: ldmd2 -d -fsanitize=address -I.. -i -debug -g -checkaction=context -allinst -unittest -main serialization.d && lldb serialization
 *
 * TODO: Give compile-time error message when trying to serialize `void*` but not `void[]`
 * TODO: Allocator support
 * TODO: Disable (de)serialization of nested types via `!__traits(isNested, T)`
 * TODO: Support serialization of cycles and remove `Code.failureCycle` and `sc`.
 * TODO: Use direct field setting for T only when __traits(isPOD, T) is true
 *       otherwise use __traits(getOverloads, T, "__ctor").
 *       Try to use this to generalize (de)serialization of `std.json.JSONValue`
 *       to type-agnostic logic inside the main generic `serializeRaw`
 *       and `deserializeRaw`.
 * TODO: Support bit-blitting of unions of only non-pointer fields.
 * TODO: Only disable unions that contain any pointers.
 *       Detect using __traits, std.traits or gc_traits.d.
 * TODO: Avoid call to `new` when deserializing arrays of immutable elements
 *       (and perhaps classes) when `Sink` element type `E` is immutable.
 * TODO: Exercise `JSONValue` (de)serialization with `nxt.sampling`.
 * TODO: Optimize (de)serialization when `__traits(hasIndirections)` is available and
 *       `__traits(hasIndirections, T)` is false and `enablesSlicing` is set.
 */
module nxt.serialization;

import nxt.visiting : Addresses;

version = serialization_json_test;

/++ Serialization format.
 +/
@safe struct Format {
	/++ Flag that integral types are packed via variable length encoding (VLE).
	 +/
	bool packIntegrals = false;

	/++ Flag that scalar types are serialized in native (platform-dependent) byte-order.
		The reason for setting this is usually to gain speed.
	 +/
	bool useNativeByteOrder = false;

	/++ Returns: `true` iff `this` enables array slices of a scalar type
		to be read/written without a loop, resulting in higher performance.
	 +/
	@property bool enablesSlicing() const pure nothrow @nogc
		=> (!packIntegrals && useNativeByteOrder);
}

/++ Status.
	Converts to `bool true` for failures to simplify control flow in
	(de)serialization functions.
 +/
struct Status {
	/++ Status code. +/
	enum Code {
		successful = 0,
		failure = 1,
		failureCycle = 2,
	}
	Code code;
	alias code this;
	bool opCast(T : bool)() const nothrow @nogc => code != Code.successful;
}

/++ Code (unit) type.
 +/
alias CodeUnitType = ubyte;
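
/// Illustrative sketch (added example): `Status` converts to `true` only on
/// failure, and `Format.enablesSlicing` requires native byte order without
/// integral packing.
@safe pure nothrow unittest {
	assert(!Status(Status.Code.successful)); // success is "falsy" ...
	assert(Status(Status.Code.failure)); // ... so `if (const st = ...) return st;` propagates failures
	assert(Status(Status.Code.failureCycle));
	static assert(Format(false, true).enablesSlicing);   // native byte order, no packing
	static assert(!Format(true, true).enablesSlicing);   // packing disables slicing
	static assert(!Format(false, false).enablesSlicing); // byte swapping disables slicing
}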

/++ Raw (binary) serialize `arg` to `sink` in format `fmt`.
	TODO: Predict `initialAddrsCapacity` in callers.
 +/
Status serializeRaw(T, Sink)(scope ref Sink sink, in T arg, in Format fmt = Format.init, in size_t initialAddrsCapacity = 0) {
	scope Addresses addrs;
	() @trusted {
		addrs.reserve(initialAddrsCapacity); // .reserve should be @trusted here
	}();
	return serializeRaw_!(T, Sink)(sink, arg, addrs, fmt);
}

private Status serializeRaw_(T, Sink)(scope ref Sink sink, in T arg, scope ref Addresses addrs, in Format fmt = Format.init) {
	alias E = typeof(Sink.init[][0]); // code unit (element) type
	static assert(__traits(isUnsigned, E),
				  "Non-unsigned sink code unit (element) type " ~ E.stringof);
	static assert(!is(T == union),
				  "Cannot serialize union type `" ~ T.stringof ~ "`");
	static if (is(T == struct) || is(T == union) || is(T == class)) {
		Status serializeFields() {
			import std.traits : FieldNameTuple;
			foreach (fieldName; FieldNameTuple!T)
				if (const st = serializeRaw_(sink, __traits(getMember, arg, fieldName), addrs, fmt))
					return st;
			return Status(Status.Code.successful);
		}
	}
	static if (is(T : __vector(U[N]), U, size_t N)) { // must come before isArithmetic
		foreach (const ref elt; arg)
			if (const st = serializeRaw_(sink, elt, addrs, fmt)) { return st; }
	} else static if (__traits(isArithmetic, T)) {
		static if (__traits(isIntegral, T)) {
			static if (__traits(isUnsigned, T)) {
				if (fmt.packIntegrals) {
					if (arg < unsignedPrefixSentinel) {
						const tmp = cast(E)arg;
						assert(tmp != unsignedPrefixSentinel);
						sink ~= tmp; // pack in single code unit
						return Status(Status.Code.successful);
					}
					else
						sink ~= unsignedPrefixSentinel;
				}
			} else { // isSigned
				if (fmt.packIntegrals) {
					if (arg >= byte.min+1 && arg <= byte.max) {
						const tmp = cast(byte)arg; // pack in single code unit
						assert(tmp != signedPrefixSentinel);
						() @trusted { sink ~= ((cast(E*)&tmp)[0 .. 1]); }();
						return Status(Status.Code.successful);
					}
					else
						sink ~= signedPrefixSentinel; // signed prefix
				}
			}
		} else static if (__traits(isFloating, T) && is(T : real)) {
			/+ TODO: pack small values +/
		}
		static if (T.sizeof <= 8 && canSwapEndianness!(T)) {
			import std.bitmanip : nativeToBigEndian;
			if (!fmt.useNativeByteOrder) {
				sink ~= arg.nativeToBigEndian[];
				return Status(Status.Code.successful);
			}
		}
		// `T` for `T.sizeof == 1` or `T` being `real`:
		() @trusted { sink ~= ((cast(E*)&arg)[0 .. T.sizeof]); }();
	} else static if (is(T == struct) || is(T == union)) {
		static if (is(typeof(T.init[]) == U[], U)) { // hasSlicing
			if (const st = serializeRaw_(sink, arg[], addrs, fmt)) { return st; }
		} else {
			if (const st = serializeFields()) { return st; }
		}
	} else static if (is(T == class) || is(T == U*, U)) { // isAddress
		const bool isNull = arg is null;
		if (const st = serializeRaw_(sink, isNull, addrs, fmt)) { return st; }
		if (isNull)
			return Status(Status.Code.successful);
		import nxt.algorithm.searching : canFind;
		void* addr;
		() @trusted { addr = cast(void*)arg; }();
		if (addrs.canFind(addr)) {
			// dbg("Cycle detected at `" ~ T.stringof ~ "`");
			return Status(Status.Code.failureCycle);
		}
		() @trusted { addrs ~= addr; }(); // `addrs`.lifetime <= `arg`.lifetime
		static if (is(T == class)) {
			if (const st = serializeFields()) { return st; }
		} else {
			if (const st = serializeRaw_(sink, *arg, addrs, fmt)) { return st; }
		}
	} else static if (is(T U : U[])) { // isArray
		static if (!__traits(isStaticArray, T)) {
			if (const st = serializeRaw_(sink, arg.length, addrs, fmt)) { return st; }
		}
		static if (__traits(isScalar, U)) {
			if (fmt.enablesSlicing) {
				() @trusted { sink ~= ((cast(E*)arg.ptr)[0 .. arg.length*U.sizeof]); }();
				return Status(Status.Code.successful);
			}
		}
		static if (is(immutable U == immutable void)) {
			ubyte[] raw;
			() @trusted { raw = cast(typeof(raw))arg; }();
			if (const st = serializeRaw_(sink, raw, addrs, fmt)) { return st; }
		} else {
			foreach (const ref elt; arg)
				if (const st = serializeRaw_(sink, elt, addrs, fmt)) { return st; }
		}
	} else static if (__traits(isAssociativeArray, T)) {
		if (const st = serializeRaw_(sink, arg.length, addrs, fmt)) { return st; }
		foreach (const ref elt; arg.byKeyValue) {
			if (const st = serializeRaw_(sink, elt.key, addrs, fmt)) { return st; }
			if (const st = serializeRaw_(sink, elt.value, addrs, fmt)) { return st; }
		}
	} else
		static assert(0, "Cannot serialize `arg` of type `" ~ T.stringof ~ "`");
	return Status(Status.Code.successful);
}
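
/// Illustrative sketch (added example) of the `packIntegrals` wire format:
/// unsigned values below the sentinel `0xff` occupy a single code unit, larger
/// ones get a sentinel prefix followed by the fixed-size (big-endian) encoding.
@safe pure nothrow unittest {
	const fmt = Format(true /*packIntegrals*/, false /*useNativeByteOrder*/);
	AppenderSink sink;
	assert(!sink.serializeRaw(ubyte(7), fmt));
	assert(sink[] == [7]); // packed into a single code unit
	assert(!sink.serializeRaw(uint(1000), fmt));
	assert(sink[].length == 1 + 1 + uint.sizeof); // previous byte + sentinel prefix + 4-byte payload
	ubyte u7;
	uint u1000;
	assert(!sink.deserializeRaw(u7, fmt) && u7 == 7);
	assert(!sink.deserializeRaw(u1000, fmt) && u1000 == 1000);
	assert(sink[].length == 0);
}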

/++ Raw (binary) deserialize `arg` from `sink` in format `fmt`.
 +/
Status deserializeRaw(T, Sink)(scope ref Sink sink, ref T arg, in Format fmt = Format.init) {
	alias E = typeof(Sink.init[][0]); // code unit (element) type
	static assert(__traits(isUnsigned, E),
				  "Non-unsigned sink code unit (element) type " ~ E.stringof);
	static assert(!is(T == union),
				  "Cannot deserialize union type `" ~ T.stringof ~ "`");
	static if (__traits(hasMember, Sink, "data") &&
			   is(immutable typeof(sink.data) == immutable E[])) {
		auto data = sink.data; // l-value to pass by ref
		const st_ = deserializeRaw!(T)(data, arg, fmt); // Appender arg => arg.data
		sink = Sink(data); /+ TODO: avoid this because it allocates +/
		return st_;
	}
	import std.traits : Unqual;
	Unqual!T* argP;
	() @trusted { argP = cast(Unqual!T*)&arg; }();
	static if (is(T == struct) || is(T == union) || is(T == class)) {
		import std.traits : FieldNameTuple;
		Status deserializeFields() {
			foreach (fieldName; FieldNameTuple!T) {
				/+ TODO: maybe use *argP here instead: +/
				if (const st = deserializeRaw(sink, __traits(getMember, arg, fieldName), fmt)) { return st; }
			}
			return Status(Status.Code.successful);
		}
	}
	static if (is(T : __vector(U[N]), U, size_t N)) { // must come before isArithmetic
		foreach (const ref elt; arg)
			if (const st = deserializeRaw(sink, elt, fmt)) { return st; }
	} else static if (__traits(isArithmetic, T)) {
		static if (__traits(isIntegral, T)) {
			if (fmt.packIntegrals) {
				auto tmp = sink.frontPop!(E)();
				static if (__traits(isUnsigned, T)) {
					if (tmp != unsignedPrefixSentinel) {
						*argP = cast(T)tmp;
						return Status(Status.Code.successful);
					}
				} else {
					if (tmp != signedPrefixSentinel) {
						() @trusted { *argP = cast(T)*cast(byte*)&tmp; }(); // reinterpret
						return Status(Status.Code.successful);
					}
				}
			}
		} else static if (__traits(isFloating, T) && is(T : real)) {
			/+ TODO: unpack small values +/
		}
		static if (T.sizeof <= 8 && canSwapEndianness!(T)) {
			if (!fmt.useNativeByteOrder) {
				*argP = sink.frontPopSwapEndian!(T)();
				return Status(Status.Code.successful);
			}
		}
		// `T` for `T.sizeof == 1` or `T` being `real`:
		*argP = sink.frontPop!(T)();
	} else static if (is(T == struct) || is(T == union)) {
		static if (is(typeof(T.init[]) == U[], U)) { // hasSlicing
			U[] tmp; // T was serialized via `T.opSlice`
			if (const st = deserializeRaw(sink, tmp, fmt)) { return st; }
			arg = T(tmp);
		} else {
			if (const st = deserializeFields()) { return st; }
		}
	} else static if (is(T == class) || is(T == U*, U)) { // isAddress
		bool isNull;
		if (const st = deserializeRaw(sink, isNull, fmt)) { return st; }
		if (isNull) {
			arg = null;
			return Status(Status.Code.successful);
		}
		static if (is(T == class)) {
			if (arg is null) {
				alias ctors = typeof(__traits(getOverloads, T, "__ctor"));
				static assert(ctors.length <= 1, "Cannot deserialize `arg` of type `" ~ T.stringof ~ "` as it has multiple constructors");
				static if (ctors.length == 1) {
					import std.traits : ParameterTypeTuple;
					alias CtorParams = ParameterTypeTuple!(ctors[0]);
					CtorParams params;
					// pragma(msg, __FILE__, "(", __LINE__, ",1): Debug: ", CtorParams);
					/+ TODO: Somehow deserialize `CtorParams` and pass them to constructor probably when compiler has native support for passing tuples to functions. +/
					static if (is(typeof(() @safe pure { return new T(params); }))) {
						arg = new T();
					} else {
						static assert(0, "Cannot deserialize `arg` of type `" ~ T.stringof ~ "` as it has no default constructor");
					}
				} else {
					static if (is(typeof(() @safe pure { return new T(); }))) {
						arg = new T();
					} else {
						static assert(0, "Cannot deserialize `arg` of type `" ~ T.stringof ~ "` as it has no default constructor");
					}
				}
			}
			if (const st = deserializeFields()) { return st; }
		} else {
			if (arg is null)
				arg = new typeof(*T.init);
			if (const st = deserializeRaw(sink, *arg, fmt)) { return st; }
		}
	} else static if (is(T U : U[])) { // isArray
		static if (!__traits(isStaticArray, T)) {
			typeof(T.init.length) length;
			if (const st = deserializeRaw(sink, length, fmt)) { return st; }
			/+ TODO: avoid allocation if `E` is `immutable` and `U` is `immutable` and both have .sizeof 1: +/
			arg.length = length; // allocates. TODO: use allocator
		}
		static if (__traits(isScalar, U)) {
			if (fmt.enablesSlicing) {
				() @trusted { arg = (cast(U*)sink[].ptr)[0 .. arg.length]; }();
				sink = cast(Sink)(sink[][arg.length * U.sizeof .. $]);
				return Status(Status.Code.successful);
			}
		}
		foreach (ref elt; arg)
			if (const st = deserializeRaw(sink, elt, fmt)) { return st; }
	} else static if (__traits(isAssociativeArray, T)) {
		typeof(T.init.length) length;
		if (const st = deserializeRaw(sink, length, fmt)) { return st; }
		/+ TODO: isMap: arg.capacity = length; or arg.reserve(length); +/
		foreach (_; 0 .. length) {
			/* WARNING: `key` and `value` must not be put in outer scope as
			   that will lead to keys being overwritten. */
			typeof(T.init.keys[0]) key;
			typeof(T.init.values[0]) value;
			if (const st = deserializeRaw(sink, key, fmt)) { return st; }
			if (const st = deserializeRaw(sink, value, fmt)) { return st; }
			arg[key] = value;
		}
	} else
		static assert(0, "Cannot deserialize `arg` of type `" ~ T.stringof ~ "`");
	return Status(Status.Code.successful);
}

private static immutable CodeUnitType unsignedPrefixSentinel = 0b_1111_1111;
private static immutable CodeUnitType signedPrefixSentinel = 0b_1000_0000;

private T frontPop(T, Sink)(ref Sink sink) in(T.sizeof <= sink[].length) {
	T* ptr;
	() @trusted { ptr = (cast(T*)sink[][0 .. T.sizeof]); }();
	typeof(return) result = *ptr; /+ TODO: unaligned access +/
	sink = cast(Sink)(sink[][T.sizeof .. $]);
	return result;
}

private T frontPopSwapEndian(T, Sink)(ref Sink sink) if (T.sizeof >= 2) {
	enum sz = T.sizeof;
	import std.bitmanip : bigEndianToNative;
	typeof(return) result = sink[][0 .. sz].bigEndianToNative!(T, sz); /+ TODO: unaligned access +/
	sink = cast(Sink)(sink[][sz .. $]);
	return result;
}
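
/// Illustrative sketch (added example): the `frontPop*` helpers consume code
/// units from the front of the sink and advance it, `frontPopSwapEndian`
/// decoding big-endian wire order into native order.
@safe pure nothrow unittest {
	ArraySink sink = [0x01, 0x02, 0x2a];
	assert(sink.frontPopSwapEndian!ushort() == 0x0102); // two big-endian code units
	assert(sink.frontPop!ubyte() == 0x2a); // raw single code unit
	assert(sink.length == 0); // fully consumed
}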

/++ Is true iff `T` has swappable endianness (byte-order). +/
private enum canSwapEndianness(T) = (T.sizeof >= 2 && T.sizeof <= 8 && __traits(isArithmetic, T));

/// void[], both sink types (to trigger instantiation)
version (none)
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			static foreach (Sink; AliasSeq!(ArraySink, AppenderSink)) {{ // trigger instantiation
				Sink sink;
				alias T = void[];
				T t = [1,2,3,4];
				assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
				// assert(sink[].length == (packIntegrals ? 1 : T.sizeof));
				// T u;
				// assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
				// assert(sink[].length == 0);
				// assert(t == u);
			}}
		}
	}
}

/// enum, both sink types (to trigger instantiation)
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			static foreach (Sink; AliasSeq!(ArraySink, AppenderSink)) {{ // trigger instantiation
				Sink sink;
				alias T = TestEnum;
				T t;
				assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
				assert(sink[].length == (packIntegrals ? 1 : T.sizeof));
				T u;
				assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
				assert(sink[].length == 0);
				assert(t == u);
			}}
		}
	}
}

/// empty {struct|union}
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			struct S {}
			struct U {}
			static foreach (T; AliasSeq!(S, U)) {{
				T t;
				assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
				assert(sink[].length == 0);
				T u;
				assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
				assert(sink[].length == 0);
				static if (!is(T == class)) {
					assert(t == u);
				}
			}}
		}
	}
}

/// class type
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			alias T = TestClass;
			T t = new T(11,22);
			assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
			assert(sink[].length != 0);
			if (fmt.packIntegrals)
				assert(sink[] == [0, 11, 22]);
			T u;
			assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
			assert(sink[].length == 0);
			assert(t.tupleof == u.tupleof);
		}
	}
}

/// cycle-struct type
version (none) /+ TODO: activate +/
@trusted unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			alias T = CycleStruct;
			T t = new T();
			t.x = 42;
			() @trusted {
				t.parent = &t;
			}();
			assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
			assert(sink[].length != 0);
			if (fmt.packIntegrals)
				assert(sink[] == [0, 11, 22]);
			T u;
			assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
			assert(sink[].length == 0);
			assert(t.tupleof == u.tupleof);
		}
	}
}
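
/// Illustrative sketch (added example) of current behavior: every repeated
/// address is reported as `Code.failureCycle`, even acyclic sharing of a single
/// target (see the cycle TODO at the top of this module).
@trusted pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			int val = 42;
			struct T { int* p1, p2; }
			T t = T(&val, &val); // two pointers to the same (non-cyclic) target
			assert(sink.serializeRaw(t, fmt) == Status(Status.Code.failureCycle));
		}
	}
}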

/// cycle-class type
@trusted unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			alias T = CycleClass;
			T t = new T(42);
			assert(sink.serializeRaw(t, fmt) == Status(Status.Code.failureCycle));
			assert(sink[].length != 0);
			/+ TODO: activate when cycles are supported: +/
			// T u;
			// assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
			// assert(sink[].length == 0);
			// assert(t.tupleof == u.tupleof);
		}
	}
}

/// struct with static field
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			struct T { static int _; }
			T t;
			assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
			assert(sink[].length == 0);
			T u;
			assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
			assert(sink[].length == 0);
			assert(t == u);
		}
	}
}

/// struct with immutable field
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			struct T { immutable int _; }
			T t;
			assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
			assert(sink[].length == (packIntegrals ? 1 : T.sizeof));
			T u;
			assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
			assert(sink[].length == 0);
			assert(t == u);
		}
	}
}

/// {char|wchar|dchar}
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			static foreach (T; CharTypes) {{
				foreach (const T t; 0 .. 127+1) {
					assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
					assert(sink[].length == (packIntegrals ? 1 : T.sizeof));
					T u;
					assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
					assert(sink[].length == 0);
					assert(t == u);
				}
			}}
		}
	}
}

/// {char|wchar|dchar}[]
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			import std.meta : AliasSeq;
			static foreach (E; CharTypes) {{
				alias T = E[];
				T t;
				assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
				assert(sink[].length != 0);
				T u;
				assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
				assert(sink[].length == 0);
				assert(t == u);
			}}
		}
	}
}

/// signed integral
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			import std.meta : AliasSeq;
			static foreach (T; AliasSeq!(byte, short, int, long)) {{
				foreach (const T t; byte.min+1 .. byte.max+1) {
					assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
					assert(sink[].length == (packIntegrals ? 1 : T.sizeof));
					T u;
					assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
					assert(sink[].length == 0);
					assert(t == u);
				}
			}}
		}
	}
}

/// core.simd vector types
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			import std.meta : AliasSeq;
			import core.simd;
			/+ TODO: support more core.simd types +/
			static foreach (T; AliasSeq!(byte16, float4, double2)) {{
				T t = 0;
				assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
				assert(sink[].length == T.sizeof);
				T u;
				assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
				assert(sink[].length == 0);
				assert(t[] == u[]);
			}}
		}
	}
}

/// unsigned integral
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			import std.meta : AliasSeq;
			static foreach (T; AliasSeq!(ubyte, ushort, uint, ulong)) {{
				foreach (const T t; 0 .. ubyte.max) {
					assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
					assert(sink[].length == (packIntegrals ? 1 : T.sizeof));
					T u;
					assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
					assert(sink[].length == 0);
					assert(t == u);
				}
			}}
		}
	}
}

/// floating point
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			import std.meta : AliasSeq;
			static foreach (T; AliasSeq!(float, double, real)) {{
				foreach (const T t; 0 .. ubyte.max) {
					assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
					assert(sink[].length == T.sizeof); // floating point values are not packed (yet)
					T u;
					assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
					assert(sink[].length == 0);
					assert(t == u);
				}
			}}
		}
	}
}

/// integral pointer
@trusted pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			import std.meta : AliasSeq;
			static foreach (E; AliasSeq!(uint, ulong)) {{
				E val = 42;
				struct T { E* p1, p2; }
				T t = T(null,&val);
				assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
				assert(sink[].length != 0);
				T u;
				assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
				assert(sink[].length == 0);
				assert(t.p1 is u.p1);
				assert(*t.p2 == *u.p2);
			}}
		}
	}
}

/// static array
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			enum n = 3;
			alias T = int[n];
			T t = [11,22,33];
			assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
			assert(sink[].length != 0);
			T u;
			assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
			assert(sink[].length == 0);
			assert(t == u);
		}
	}
}

/// dynamic array
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			alias T = int[];
			T t = [11,22,33];
			assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
			assert(sink[].length == (fmt.packIntegrals ? (1 + t.length) : 20));
			T u;
			assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
			assert(sink[].length == 0);

			assert(t == u);
		}
	}
}

/// associative array
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			alias T = int[int];
			T t = [1: 1, 2: 2];
			assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
			assert(sink[].length != 0);
			T u;
			assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
			assert(sink[].length == 0);
			assert(t == u);
		}
	}
}

/// associative array
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			alias T = int[string];
			T t = ["1": 3, "2": 4];
			assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
			assert(sink[].length != 0);
			T u = T.init;
			assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
			assert(sink[].length == 0);
			assert(t == u);
		}
	}
}

/// empty `std.array.Appender` via slicing
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			import std.array : Appender;
			AppenderSink sink;
			alias A = int[];
			alias T = Appender!(A);
			T t = [];
			assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
			assert(sink[].length == (fmt.packIntegrals ? 1 : 8) + t.data.length);
			T u;
			assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
			assert(sink[].length == 0);
			assert(t[] == u[]);
		}
	}
}

/// populated `std.array.Appender` via slicing
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			import std.array : Appender;
			AppenderSink sink;
			alias A = int[];
			alias T = Appender!(A);
			A a = [11,22,33];
			T t = a;
			assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
			assert(sink[].length == (fmt.packIntegrals ? 4 : 20));
			AppenderSink asink;
			assert(!asink.serializeRaw(a, fmt));
			assert(asink[].length == (fmt.packIntegrals ? 4 : 20));
			assert(sink[] == asink[]);
			T u;
			assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
			assert(sink[].length == 0);
			assert(t[] == u[]);
		}
	}
}

/// aggregate type
@safe pure nothrow unittest {
	foreach (const packIntegrals; [false, true]) {
		foreach (const useNativeByteOrder; [false, true]) {
			const fmt = Format(packIntegrals, useNativeByteOrder);
			AppenderSink sink;
			struct P { int x, y; int* p1, p2; char[3] ch3; char ch; wchar wc; dchar dc; int[int] aa; }
			struct T { int x, y; long l; float f; double d; real r; bool b1, b2; P p; }
			T t = T(0x01234567,0x76543210, 0x01234567_01234567, 3.14,3.14,3.14, false,true,
					P(2,3, null,null, "abc", 'a', 'b', 'c', [11:-11,-22:22,-33:-33]));
			assert(sink.serializeRaw(t, fmt) == Status(Status.Code.successful));
			T u;
			assert(sink.deserializeRaw(u, fmt) == Status(Status.Code.successful));
			assert(sink[].length == 0);
			assert(t == u);
		}
	}
}

version (serialization_json_test)
private import std.json : JSONValue, JSONType, parseJSON;

/++ Raw (binary) serialize `JSONValue arg` to `sink` in format `fmt`. +/
version (serialization_json_test)
private Status serializeRaw_(T : JSONValue, AppenderSink)(scope ref AppenderSink sink, in T arg, scope ref Addresses addrs, in Format fmt = Format.init) {
	if (const st = serializeRaw_(sink, arg.type, addrs, fmt)) { return st; }
	final switch (arg.type) {
	case JSONType.integer:
		return serializeRaw_(sink, arg.integer, addrs, fmt);
	case JSONType.uinteger:
		return serializeRaw_(sink, arg.uinteger, addrs, fmt);
	case JSONType.float_:
		return serializeRaw_(sink, arg.floating, addrs, fmt);
	case JSONType.string:
		return serializeRaw_(sink, arg.str, addrs, fmt);
	case JSONType.object:
		return serializeRaw_(sink, arg.object, addrs, fmt);
	case JSONType.array:
		return serializeRaw_(sink, arg.array, addrs, fmt);
	case JSONType.true_:
	case JSONType.false_:
	case JSONType.null_:
		return Status(Status.Code.successful);
	}
}

/++ Raw (binary) deserialize `JSONValue arg` from `sink` in format `fmt`. +/
version (serialization_json_test)
Status deserializeRaw(T : JSONValue, AppenderSink)(scope ref AppenderSink sink, ref T arg, in Format fmt = Format.init) {
	JSONType type;
	if (const st = deserializeRaw(sink, type, fmt)) { return st; }
	typeof(return) st;
	final switch (type) {
	case JSONType.integer:
		typeof(arg.integer) value;
		st = deserializeRaw(sink, value, fmt);
		if (st == Status(Status.Code.successful)) arg = JSONValue(value);
		break;
	case JSONType.uinteger:
		typeof(arg.uinteger) value;
		st = deserializeRaw(sink, value, fmt);
		if (st == Status(Status.Code.successful)) arg = JSONValue(value);
		break;
	case JSONType.float_:
		typeof(arg.floating) value;
		st = deserializeRaw(sink, value, fmt);
		if (st == Status(Status.Code.successful)) arg = JSONValue(value);
		break;
	case JSONType.string:
		typeof(arg.str) value;
		st = deserializeRaw(sink, value, fmt);
		if (st == Status(Status.Code.successful)) arg = JSONValue(value);
		break;
	case JSONType.object:
		typeof(arg.object) value;
		st = deserializeRaw(sink, value, fmt);
		if (st == Status(Status.Code.successful)) arg = JSONValue(value);
		break;
	case JSONType.array:
		typeof(arg.array) value;
		st = deserializeRaw(sink, value, fmt);
		if (st == Status(Status.Code.successful)) arg = JSONValue(value);
		break;
	case JSONType.true_:
		arg = true;
		st = Status(Status.Code.successful);
		break;
	case JSONType.false_:
		arg = false;
		st = Status(Status.Code.successful);
		break;
	case JSONType.null_:
		arg = null;
		st = Status(Status.Code.successful);
		break;
	}
	return st;
}

// { "optional": true }
version (serialization_json_test)
@trusted unittest {
	foreach (const s; [`false`,
					   `true`,
					   `null`,
					   `{}`,
					   `12`,
					   `"x"`,
					   `[1,2]`,
					   `[1,2,[3,4,5,"x","y"],"a",null]`,
					   `[1,3.14,"x",null]`,
					   `{ "optional":false }`,
					   `{ "optional":true }`,
					   `{ "optional":null }`,
					   `{ "a":"a", }`,
					   `{ "a":"a", "a":"a", }`,
					   `{ "a":1, "a":2, }`,
					   `{ "":1, "":2, }`,
					   `{ "":1, "b":2, }`,
					   `{ "a":1, "":2, }`,
					   `{ "a":1, "b":2, }`,
					   /+ TODO: this fails: readLargeFile, +/
					   ]) {
		foreach (const packIntegrals; [false, true]) {
			foreach (const useNativeByteOrder; [false, true]) {
				const fmt = Format(packIntegrals, useNativeByteOrder);
				AppenderSink sink;
				alias T = JSONValue;
				const T t = s.parseJSON();
				assert(sink.serializeRaw!(JSONValue)(t, fmt) == Status(Status.Code.successful));
				assert(sink[].length != 0);
				T u;
				assert(sink.deserializeRaw!(JSONValue)(u, fmt) == Status(Status.Code.successful));
				assert(sink[].length == 0);
				assert(t == u);
			}
		}
	}
}

version (none)
version (serialization_json_test)
private @system string readLargeFile() {
	import std.path : expandTilde;
	import std.file : readText;
	return "~/Downloads/large-file.json".expandTilde.readText;
}

version (unittest) {
	import std.array : Appender;
	import std.meta : AliasSeq;
	private alias ArraySink = CodeUnitType[];
	private alias AppenderSink = Appender!(ArraySink);
	private alias CharTypes = AliasSeq!(char, wchar, dchar);
	private enum TestEnum { first, second, third }
	private class TestClass {
		int a, b;
		this(int a = 0, int b = 0) @safe pure nothrow @nogc {
			this.a = a;
			this.b = b;
		}
	}
	private class CycleClass {
		this(int x = 0) @safe pure nothrow @nogc {
			this.x = x;
			this.parent = this; // self-reference
		}
		int x;
		CycleClass parent;
	}
	private class CycleStruct {
		int x;
		CycleStruct* parent;
	}
	import std.traits : ParameterTypeTuple;
	alias ConstructorParams = ParameterTypeTuple!(typeof(__traits(getOverloads, TestClass, "__ctor")[0]));
	static assert(is(ConstructorParams == AliasSeq!(int, int)));
	debug import nxt.debugio;
}
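
/// Illustrative sketch (added example): with `enablesSlicing` (native byte
/// order, no integral packing) a dynamic array of scalars is written as one
/// length prefix followed by its raw element bytes, with no per-element loop.
@safe pure nothrow unittest {
	const fmt = Format(false /*packIntegrals*/, true /*useNativeByteOrder*/);
	assert(fmt.enablesSlicing);
	AppenderSink sink;
	alias T = ushort[];
	T t = [1, 2, 3];
	assert(!sink.serializeRaw(t, fmt));
	assert(sink[].length == size_t.sizeof + t.length * ushort.sizeof); // length prefix + payload
	T u;
	assert(!sink.deserializeRaw(u, fmt));
	assert(sink[].length == 0);
	assert(t == u);
}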