|
/*******************************************************************************

    YAML serialization helper

*******************************************************************************/
| 6 | + |
| 7 | +module dub.recipe.yaml; |
| 8 | + |
| 9 | +import dub.internal.vibecompat.data.json; |
| 10 | + |
| 11 | +import std.algorithm; |
| 12 | +import std.array : appender, Appender; |
| 13 | +import std.bigint; |
| 14 | +import std.format; |
| 15 | +import std.range; |
| 16 | + |
/// Serializes `json` and returns the resulting YAML document as a string.
package string toYAML (Json json) {
    Appender!string buffer;
    serializeHelper(json, buffer, 0);
    return buffer.data;
}
| 22 | + |
/// Ditto, but writes the YAML text directly into the output range `dst`.
package void toYAML (R) (Json json, ref R dst) {
    // Top-level document: serialization starts at zero indentation
    serializeHelper!R(json, dst, 0);
}
| 26 | + |
/***************************************************************************

    Recursively serializes `value` as YAML into the output range `dst`.

    Scalar values are written inline followed by a newline; mappings and
    sequences recurse with increased indentation.

    Params:
        value = the JSON value to serialize
        dst = output range receiving the YAML text
        indent = number of spaces prefixed to each emitted line
        skipFirstIndent = when `true`, the first emitted mapping entry is
            not indented — used when a mapping starts on the same line as
            a sequence entry marker (`- `)

***************************************************************************/

private void serializeHelper (R) (Json value, ref R dst, size_t indent, bool skipFirstIndent = false) {
    final switch (value.type) {
    case Json.Type.object:
        // Well-known fields first, in a fixed order, for nicer output
        foreach (fieldName; FieldOrder) {
            if (auto ptr = fieldName in value) {
                serializeField(dst, fieldName, *ptr, skipFirstIndent ? 0 : indent);
                skipFirstIndent = false;
            }
        }
        // Remaining fields follow in the object's iteration order
        foreach (string key, fieldValue; value) {
            if (FieldOrder.canFind(key)) continue;
            serializeField(dst, key, fieldValue, skipFirstIndent ? 0 : indent);
            skipFirstIndent = false;
        }
        break;
    case Json.Type.array:
        foreach (size_t idx, element; value) {
            // `indent` spaces followed by the sequence entry marker
            formattedWrite(dst, "%*.*0$s- ", indent, ` `);

            if (element.isScalar) {
                serializeHelper(element, dst, 0);
            } else {
                // Nested structure starts on the `- ` line itself
                serializeHelper(element, dst, indent + 2, true);
            }
        }
        break;
    case Json.Type.string:
        // Escape characters that are significant inside a YAML double-quoted
        // scalar; previously an embedded `"`, `\` or newline produced
        // invalid YAML output.
        dst.put('"');
        foreach (char c; value.get!string) {
            switch (c) {
            case '"':  dst.put(`\"`); break;
            case '\\': dst.put(`\\`); break;
            case '\n': dst.put(`\n`); break;
            case '\r': dst.put(`\r`); break;
            case '\t': dst.put(`\t`); break;
            default:   dst.put(c);    break;  // multi-byte UTF-8 passes through unchanged
            }
        }
        dst.put('"');
        break;
    case Json.Type.bool_:
        dst.put(value.get!bool ? "true" : "false");
        break;
    case Json.Type.null_:
        dst.put("null");
        break;
    case Json.Type.int_:
        formattedWrite(dst, "%s", value.get!long);
        break;
    case Json.Type.bigInt:
        formattedWrite(dst, "%s", value.get!BigInt);
        break;
    case Json.Type.float_:
        // NOTE(review): `%s` prints NaN/Infinity as "nan"/"inf", which are
        // not valid YAML (YAML spells them ".nan"/".inf") — confirm inputs.
        formattedWrite(dst, "%s", value.get!double);
        break;
    case Json.Type.undefined:
        // Undefined values are silently skipped
        break;
    }
    // Scalars own their line terminator; containers end via their children
    if (value.isScalar)
        dst.put("\n");
}
| 77 | + |
/***************************************************************************

    Writes one mapping entry (`key: value`) to `dst`.

    A scalar value is placed on the same line as its key; a structured
    value starts on the next line, indented two additional spaces.

***************************************************************************/

private void serializeField (R) (ref R dst, string key, Json value, size_t indent) {
    formattedWrite(dst, "%*.*0$s%s:", indent, ` `, key);
    if (!value.isScalar) {
        dst.put("\n");
        serializeHelper(value, dst, indent + 2);
        return;
    }
    dst.put(" ");
    serializeHelper(value, dst, 0);
}
| 88 | + |
/// Returns: `true` when `value` is neither a mapping nor a sequence,
/// i.e. it can be written inline on a single line.
private bool isScalar(Json value) {
    switch (value.type) {
    case Json.Type.object, Json.Type.array:
        return false;
    default:
        return true;
    }
}
| 92 | + |
/// Well-known recipe fields emitted first, in this order, so the generated
/// YAML reads naturally (identity fields first, dependencies/configurations
/// later). Fields not listed here are appended afterwards in iteration order.
private immutable FieldOrder = [
    "name", "description", "homepage", "authors", "copyright", "license",
    "toolchainRequirements", "mainSourceFile", "dependencies", "configurations",
];
0 commit comments