author     Kitson Kelly <me@kitsonkelly.com>   2020-03-29 04:03:49 +1100
committer  GitHub <noreply@github.com>         2020-03-28 13:03:49 -0400
commit     bced52505f32d6cca4f944bb610a8a26767908a8 (patch)
tree       da49a5df4b7bd6f8306248069228cd6bd0db1303 /std/encoding
parent     1397b8e0e7c85762e19d88fde103342bfa563360 (diff)
Update to Prettier 2 and use ES Private Fields (#4498)
Diffstat (limited to 'std/encoding')
36 files changed, 268 insertions, 271 deletions
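Nearly all of the churn below comes from Prettier 2's new formatting defaults rather than hand-written changes: single arrow-function parameters are now always parenthesized, and multi-line object, array, import, and export lists gain trailing commas. The ES private-fields half of the commit title does not show up in the std/encoding portion of the diff. A minimal before/after sketch of the two formatting rules (illustrative only, not lifted from the diff):

```ts
// Prettier 1.x output (old defaults): bare single arrow parameter and no
// trailing comma after the last entry of a multi-line literal.
const fromHexOld = (hex: string): Uint8Array =>
  new Uint8Array(hex.match(/.{1,2}/g)!.map(byte => parseInt(byte, 16)));

const optOld = {
  header: false,
  trimLeadingSpace: true
};

// Prettier 2.x output (new defaults): the single parameter is parenthesized
// and the multi-line object literal gets a trailing comma. Behavior is
// identical; only the printed form changes.
const fromHexNew = (hex: string): Uint8Array =>
  new Uint8Array(hex.match(/.{1,2}/g)!.map((byte) => parseInt(byte, 16)));

const optNew = {
  header: false,
  trimLeadingSpace: true,
};
```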
diff --git a/std/encoding/README.md b/std/encoding/README.md index 2b2d416b1..e6604c605 100644 --- a/std/encoding/README.md +++ b/std/encoding/README.md @@ -39,7 +39,7 @@ const string = "a,b,c\nd,e,f"; console.log( await parseCsv(string, { - header: false + header: false, }) ); // output: @@ -161,9 +161,9 @@ import { stringify } from "./parser.ts"; const obj = { bin: [ { name: "deno", path: "cli/main.rs" }, - { name: "deno_core", path: "src/foo.rs" } + { name: "deno_core", path: "src/foo.rs" }, ], - nib: [{ name: "node", path: "not_found" }] + nib: [{ name: "node", path: "not_found" }], }; const tomlString = stringify(obj); ``` diff --git a/std/encoding/base32_test.ts b/std/encoding/base32_test.ts index 28550d57a..2bd7acea1 100644 --- a/std/encoding/base32_test.ts +++ b/std/encoding/base32_test.ts @@ -6,7 +6,7 @@ import { encode, decode } from "./base32.ts"; // Lifted from https://stackoverflow.com/questions/38987784 const fromHexString = (hexString: string): Uint8Array => - new Uint8Array(hexString.match(/.{1,2}/g)!.map(byte => parseInt(byte, 16))); + new Uint8Array(hexString.match(/.{1,2}/g)!.map((byte) => parseInt(byte, 16))); const toHexString = (bytes: Uint8Array): string => bytes.reduce((str, byte) => str + byte.toString(16).padStart(2, "0"), ""); @@ -34,56 +34,56 @@ const testCases = [ ["ddf80ebe21bf1b1e12a64c5cc6a74b5d92dd", "3X4A5PRBX4NR4EVGJROMNJ2LLWJN2==="], [ "c0cae52c6f641ce04a7ee5b9a8fa8ded121bca", - "YDFOKLDPMQOOAST64W42R6UN5UJBXSQ=" + "YDFOKLDPMQOOAST64W42R6UN5UJBXSQ=", ], [ "872840a355c8c70586f462c9e669ee760cb3537e", - "Q4UEBI2VZDDQLBXUMLE6M2POOYGLGU36" + "Q4UEBI2VZDDQLBXUMLE6M2POOYGLGU36", ], [ "5773fe22662818a120c5688824c935fe018208a496", - "K5Z74ITGFAMKCIGFNCECJSJV7YAYECFESY======" + "K5Z74ITGFAMKCIGFNCECJSJV7YAYECFESY======", ], [ "416e23abc524d1b85736e2bea6cfecd5192789034a28", - "IFXCHK6FETI3QVZW4K7KNT7M2UMSPCIDJIUA====" + "IFXCHK6FETI3QVZW4K7KNT7M2UMSPCIDJIUA====", ], [ "83d2386ebdd7e8e818ec00e3ccd882aa933b905b7e2e44", - "QPJDQ3V527UOQGHMADR4ZWECVKJTXEC3PYXEI===" + "QPJDQ3V527UOQGHMADR4ZWECVKJTXEC3PYXEI===", ], [ "a2fa8b881f3b8024f52745763c4ae08ea12bdf8bef1a72f8", - "UL5IXCA7HOACJ5JHIV3DYSXAR2QSXX4L54NHF6A=" + "UL5IXCA7HOACJ5JHIV3DYSXAR2QSXX4L54NHF6A=", ], [ "b074ae8b9efde0f17f37bccadde006d039997b59c8efb05add", - "WB2K5C467XQPC7ZXXTFN3YAG2A4ZS62ZZDX3AWW5" + "WB2K5C467XQPC7ZXXTFN3YAG2A4ZS62ZZDX3AWW5", ], [ "764fef941aee7e416dc204ae5ab9c5b9ce644567798e6849aea9", - "OZH67FA25Z7EC3OCASXFVOOFXHHGIRLHPGHGQSNOVE======" + "OZH67FA25Z7EC3OCASXFVOOFXHHGIRLHPGHGQSNOVE======", ], [ "4995d9811f37f59797d7c3b9b9e5325aa78277415f70f4accf588c", - "JGK5TAI7G72ZPF6XYO43TZJSLKTYE52BL5YPJLGPLCGA====" + "JGK5TAI7G72ZPF6XYO43TZJSLKTYE52BL5YPJLGPLCGA====", ], [ "24f0812ca8eed58374c11a7008f0b262698b72fd2792709208eaacb2", - "ETYICLFI53KYG5GBDJYAR4FSMJUYW4X5E6JHBEQI5KWLE===" + "ETYICLFI53KYG5GBDJYAR4FSMJUYW4X5E6JHBEQI5KWLE===", ], [ "d70692543810d4bf50d81cf44a55801a557a388a341367c7ea077ca306", - "24DJEVBYCDKL6UGYDT2EUVMADJKXUOEKGQJWPR7KA56KGBQ=" + "24DJEVBYCDKL6UGYDT2EUVMADJKXUOEKGQJWPR7KA56KGBQ=", ], [ "6e08a89ca36b677ff8fe99e68a1241c8d8cef2570a5f60b6417d2538b30c", - "NYEKRHFDNNTX76H6THTIUESBZDMM54SXBJPWBNSBPUSTRMYM" + "NYEKRHFDNNTX76H6THTIUESBZDMM54SXBJPWBNSBPUSTRMYM", ], [ "f2fc2319bd29457ccd01e8e194ee9bd7e97298b6610df4ab0f3d5baa0b2d7ccf69829edb74edef", - "6L6CGGN5FFCXZTIB5DQZJ3U327UXFGFWMEG7JKYPHVN2UCZNPTHWTAU63N2O33Y=" - ] + "6L6CGGN5FFCXZTIB5DQZJ3U327UXFGFWMEG7JKYPHVN2UCZNPTHWTAU63N2O33Y=", + ], ]; Deno.test({ @@ -92,7 +92,7 @@ Deno.test({ for (const [bin, b32] of testCases) { 
assertEquals(encode(fromHexString(bin)), b32); } - } + }, }); Deno.test({ @@ -101,7 +101,7 @@ Deno.test({ for (const [bin, b32] of testCases) { assertEquals(toHexString(decode(b32)), bin); } - } + }, }); Deno.test({ @@ -117,7 +117,7 @@ Deno.test({ errorCaught = true; } assert(errorCaught); - } + }, }); Deno.test({ @@ -131,5 +131,5 @@ Deno.test({ errorCaught = true; } assert(errorCaught); - } + }, }); diff --git a/std/encoding/binary.ts b/std/encoding/binary.ts index 2eec9b4ab..cd338703b 100644 --- a/std/encoding/binary.ts +++ b/std/encoding/binary.ts @@ -34,7 +34,7 @@ const rawTypeSizes = { int64: 8, uint64: 8, float32: 4, - float64: 8 + float64: 8, }; /** Returns the number of bytes required to store the given data-type. */ diff --git a/std/encoding/binary_test.ts b/std/encoding/binary_test.ts index 54f8cbded..084ca2fa4 100644 --- a/std/encoding/binary_test.ts +++ b/std/encoding/binary_test.ts @@ -11,7 +11,7 @@ import { varbig, varnum, writeVarbig, - writeVarnum + writeVarnum, } from "./binary.ts"; Deno.test(async function testGetNBytes(): Promise<void> { diff --git a/std/encoding/csv.ts b/std/encoding/csv.ts index 12336b10d..c8c7719ca 100644 --- a/std/encoding/csv.ts +++ b/std/encoding/csv.ts @@ -118,7 +118,7 @@ export async function readMatrix( opt: ReadOptions = { comma: ",", trimLeadingSpace: false, - lazyQuotes: false + lazyQuotes: false, } ): Promise<string[][]> { const result: string[][] = []; @@ -195,7 +195,7 @@ export interface ParseOptions extends ReadOptions { export async function parse( input: string | BufReader, opt: ParseOptions = { - header: false + header: false, } ): Promise<unknown[]> { let r: string[][]; @@ -215,7 +215,7 @@ export async function parse( headers = h.map( (e): HeaderOptions => { return { - name: e + name: e, }; } ); @@ -226,7 +226,7 @@ export async function parse( headers = head.map( (e): HeaderOptions => { return { - name: e + name: e, }; } ); diff --git a/std/encoding/csv_test.ts b/std/encoding/csv_test.ts index 74ad00c72..cb61de433 100644 --- a/std/encoding/csv_test.ts +++ b/std/encoding/csv_test.ts @@ -14,20 +14,20 @@ const testCases = [ { Name: "Simple", Input: "a,b,c\n", - Output: [["a", "b", "c"]] + Output: [["a", "b", "c"]], }, { Name: "CRLF", Input: "a,b\r\nc,d\r\n", Output: [ ["a", "b"], - ["c", "d"] - ] + ["c", "d"], + ], }, { Name: "BareCR", Input: "a,b\rc,d\r\n", - Output: [["a", "b\rc", "d"]] + Output: [["a", "b\rc", "d"]], }, { Name: "RFC4180test", @@ -41,20 +41,20 @@ zzz,yyy,xxx`, ["#field1", "field2", "field3"], ["aaa", "bbb", "ccc"], ["a,a", `bbb`, "ccc"], - ["zzz", "yyy", "xxx"] + ["zzz", "yyy", "xxx"], ], - ignore: true + ignore: true, }, { Name: "NoEOLTest", Input: "a,b,c", - Output: [["a", "b", "c"]] + Output: [["a", "b", "c"]], }, { Name: "Semicolon", Input: "a;b;c\n", Output: [["a", "b", "c"]], - Comma: ";" + Comma: ";", }, { Name: "MultiLine", @@ -63,103 +63,103 @@ line","one line","three line field"`, Output: [["two\nline"], ["one line"], ["three\nline\nfield"]], - ignore: true + ignore: true, }, { Name: "BlankLine", Input: "a,b,c\n\nd,e,f\n\n", Output: [ ["a", "b", "c"], - ["d", "e", "f"] - ] + ["d", "e", "f"], + ], }, { Name: "BlankLineFieldCount", Input: "a,b,c\n\nd,e,f\n\n", Output: [ ["a", "b", "c"], - ["d", "e", "f"] + ["d", "e", "f"], ], UseFieldsPerRecord: true, - FieldsPerRecord: 0 + FieldsPerRecord: 0, }, { Name: "TrimSpace", Input: " a, b, c\n", Output: [["a", "b", "c"]], - TrimLeadingSpace: true + TrimLeadingSpace: true, }, { Name: "LeadingSpace", Input: " a, b, c\n", - Output: [[" a", " b", " c"]] + Output: [[" a", " 
b", " c"]], }, { Name: "Comment", Input: "#1,2,3\na,b,c\n#comment", Output: [["a", "b", "c"]], - Comment: "#" + Comment: "#", }, { Name: "NoComment", Input: "#1,2,3\na,b,c", Output: [ ["#1", "2", "3"], - ["a", "b", "c"] - ] + ["a", "b", "c"], + ], }, { Name: "LazyQuotes", Input: `a "word","1"2",a","b`, Output: [[`a "word"`, `1"2`, `a"`, `b`]], - LazyQuotes: true + LazyQuotes: true, }, { Name: "BareQuotes", Input: `a "word","1"2",a"`, Output: [[`a "word"`, `1"2`, `a"`]], - LazyQuotes: true + LazyQuotes: true, }, { Name: "BareDoubleQuotes", Input: `a""b,c`, Output: [[`a""b`, `c`]], - LazyQuotes: true + LazyQuotes: true, }, { Name: "BadDoubleQuotes", Input: `a""b,c`, - Error: ErrBareQuote + Error: ErrBareQuote, // Error: &ParseError{StartLine: 1, Line: 1, Column: 1, Err: ErrBareQuote}, }, { Name: "TrimQuote", Input: ` "a"," b",c`, Output: [["a", " b", "c"]], - TrimLeadingSpace: true + TrimLeadingSpace: true, }, { Name: "BadBareQuote", Input: `a "word","b"`, - Error: ErrBareQuote + Error: ErrBareQuote, // &ParseError{StartLine: 1, Line: 1, Column: 2, Err: ErrBareQuote} }, { Name: "BadTrailingQuote", Input: `"a word",b"`, - Error: ErrBareQuote + Error: ErrBareQuote, }, { Name: "ExtraneousQuote", Input: `"a "word","b"`, - Error: ErrBareQuote + Error: ErrBareQuote, }, { Name: "BadFieldCount", Input: "a,b,c\nd,e", Error: ErrFieldCount, UseFieldsPerRecord: true, - FieldsPerRecord: 0 + FieldsPerRecord: 0, }, { Name: "BadFieldCount1", @@ -167,37 +167,37 @@ field"`, // Error: &ParseError{StartLine: 1, Line: 1, Err: ErrFieldCount}, UseFieldsPerRecord: true, FieldsPerRecord: 2, - Error: ErrFieldCount + Error: ErrFieldCount, }, { Name: "FieldCount", Input: "a,b,c\nd,e", Output: [ ["a", "b", "c"], - ["d", "e"] - ] + ["d", "e"], + ], }, { Name: "TrailingCommaEOF", Input: "a,b,c,", - Output: [["a", "b", "c", ""]] + Output: [["a", "b", "c", ""]], }, { Name: "TrailingCommaEOL", Input: "a,b,c,\n", - Output: [["a", "b", "c", ""]] + Output: [["a", "b", "c", ""]], }, { Name: "TrailingCommaSpaceEOF", Input: "a,b,c, ", Output: [["a", "b", "c", ""]], - TrimLeadingSpace: true + TrimLeadingSpace: true, }, { Name: "TrailingCommaSpaceEOL", Input: "a,b,c, \n", Output: [["a", "b", "c", ""]], - TrimLeadingSpace: true + TrimLeadingSpace: true, }, { Name: "TrailingCommaLine3", @@ -205,14 +205,14 @@ field"`, Output: [ ["a", "b", "c"], ["d", "e", "f"], - ["g", "hi", ""] + ["g", "hi", ""], ], - TrimLeadingSpace: true + TrimLeadingSpace: true, }, { Name: "NotTrailingComma3", Input: "a,b,c, \n", - Output: [["a", "b", "c", " "]] + Output: [["a", "b", "c", " "]], }, { Name: "CommaFieldTest", @@ -237,98 +237,98 @@ x,,, ["x", "y", "z", ""], ["x", "y", "", ""], ["x", "", "", ""], - ["", "", "", ""] - ] + ["", "", "", ""], + ], }, { Name: "TrailingCommaIneffective1", Input: "a,b,\nc,d,e", Output: [ ["a", "b", ""], - ["c", "d", "e"] + ["c", "d", "e"], ], - TrimLeadingSpace: true + TrimLeadingSpace: true, }, { Name: "ReadAllReuseRecord", Input: "a,b\nc,d", Output: [ ["a", "b"], - ["c", "d"] + ["c", "d"], ], - ReuseRecord: true + ReuseRecord: true, }, { Name: "StartLine1", // Issue 19019 Input: 'a,"b\nc"d,e', Error: true, // Error: &ParseError{StartLine: 1, Line: 2, Column: 1, Err: ErrQuote}, - ignore: true + ignore: true, }, { Name: "StartLine2", Input: 'a,b\n"d\n\n,e', Error: true, // Error: &ParseError{StartLine: 2, Line: 5, Column: 0, Err: ErrQuote}, - ignore: true + ignore: true, }, { Name: "CRLFInQuotedField", // Issue 21201 Input: 'A,"Hello\r\nHi",B\r\n', Output: [["A", "Hello\nHi", "B"]], - ignore: true + ignore: true, }, { Name: 
"BinaryBlobField", // Issue 19410 Input: "x09\x41\xb4\x1c,aktau", - Output: [["x09A\xb4\x1c", "aktau"]] + Output: [["x09A\xb4\x1c", "aktau"]], }, { Name: "TrailingCR", Input: "field1,field2\r", Output: [["field1", "field2"]], - ignore: true + ignore: true, }, { Name: "QuotedTrailingCR", Input: '"field"\r', Output: [['"field"']], - ignore: true + ignore: true, }, { Name: "QuotedTrailingCRCR", Input: '"field"\r\r', Error: true, // Error: &ParseError{StartLine: 1, Line: 1, Column: 6, Err: ErrQuote}, - ignore: true + ignore: true, }, { Name: "FieldCR", Input: "field\rfield\r", Output: [["field\rfield"]], - ignore: true + ignore: true, }, { Name: "FieldCRCR", Input: "field\r\rfield\r\r", Output: [["field\r\rfield\r"]], - ignore: true + ignore: true, }, { Name: "FieldCRCRLF", Input: "field\r\r\nfield\r\r\n", - Output: [["field\r"], ["field\r"]] + Output: [["field\r"], ["field\r"]], }, { Name: "FieldCRCRLFCR", Input: "field\r\r\n\rfield\r\r\n\r", - Output: [["field\r"], ["\rfield\r"]] + Output: [["field\r"], ["\rfield\r"]], }, { Name: "FieldCRCRLFCRCR", Input: "field\r\r\n\r\rfield\r\r\n\r\r", Output: [["field\r"], ["\r\rfield\r"], ["\r"]], - ignore: true + ignore: true, }, { Name: "MultiFieldCRCRLFCRCR", @@ -336,9 +336,9 @@ x,,, Output: [ ["field1", "field2\r"], ["\r\rfield1", "field2\r"], - ["\r\r", ""] + ["\r\r", ""], ], - ignore: true + ignore: true, }, { Name: "NonASCIICommaAndComment", @@ -346,14 +346,14 @@ x,,, Output: [["a", "b,c", "d,e"]], TrimLeadingSpace: true, Comma: "£", - Comment: "€" + Comment: "€", }, { Name: "NonASCIICommaAndCommentWithQuotes", Input: 'a€" b,"€ c\nλ comment\n', Output: [["a", " b,", " c"]], Comma: "€", - Comment: "λ" + Comment: "λ", }, { // λ and θ start with the same byte. @@ -362,24 +362,24 @@ x,,, Input: '"abθcd"λefθgh', Output: [["abθcd", "efθgh"]], Comma: "λ", - Comment: "€" + Comment: "€", }, { Name: "NonASCIICommentConfusion", Input: "λ\nλ\nθ\nλ\n", Output: [["λ"], ["λ"], ["λ"]], - Comment: "θ" + Comment: "θ", }, { Name: "QuotedFieldMultipleLF", Input: '"\n\n\n\n"', Output: [["\n\n\n\n"]], - ignore: true + ignore: true, }, { Name: "MultipleCRLF", Input: "\r\n\r\n\r\n\r\n", - ignore: true + ignore: true, }, /** * The implementation may read each line in several chunks if @@ -392,77 +392,77 @@ x,,, "#ignore\n".repeat(10000) + "@".repeat(5000) + "," + "*".repeat(5000), Output: [["@".repeat(5000), "*".repeat(5000)]], Comment: "#", - ignore: true + ignore: true, }, { Name: "QuoteWithTrailingCRLF", Input: '"foo"bar"\r\n', - Error: ErrBareQuote + Error: ErrBareQuote, // Error: &ParseError{StartLine: 1, Line: 1, Column: 4, Err: ErrQuote}, }, { Name: "LazyQuoteWithTrailingCRLF", Input: '"foo"bar"\r\n', Output: [[`foo"bar`]], - LazyQuotes: true + LazyQuotes: true, }, { Name: "DoubleQuoteWithTrailingCRLF", Input: '"foo""bar"\r\n', Output: [[`foo"bar`]], - ignore: true + ignore: true, }, { Name: "EvenQuotes", Input: `""""""""`, Output: [[`"""`]], - ignore: true + ignore: true, }, { Name: "OddQuotes", Input: `"""""""`, Error: true, // Error:" &ParseError{StartLine: 1, Line: 1, Column: 7, Err: ErrQuote}", - ignore: true + ignore: true, }, { Name: "LazyOddQuotes", Input: `"""""""`, Output: [[`"""`]], LazyQuotes: true, - ignore: true + ignore: true, }, { Name: "BadComma1", Comma: "\n", - Error: ErrInvalidDelim + Error: ErrInvalidDelim, }, { Name: "BadComma2", Comma: "\r", - Error: ErrInvalidDelim + Error: ErrInvalidDelim, }, { Name: "BadComma3", Comma: '"', - Error: ErrInvalidDelim + Error: ErrInvalidDelim, }, { Name: "BadComment1", Comment: "\n", - Error: ErrInvalidDelim 
+ Error: ErrInvalidDelim, }, { Name: "BadComment2", Comment: "\r", - Error: ErrInvalidDelim + Error: ErrInvalidDelim, }, { Name: "BadCommaComment", Comma: "X", Comment: "X", - Error: ErrInvalidDelim - } + Error: ErrInvalidDelim, + }, ]; for (const t of testCases) { Deno.test({ @@ -500,7 +500,7 @@ for (const t of testCases) { comment: comment, trimLeadingSpace: trim, fieldsPerRecord: fieldsPerRec, - lazyQuotes: lazyquote + lazyQuotes: lazyquote, } ); } catch (e) { @@ -516,13 +516,13 @@ for (const t of testCases) { comment: comment, trimLeadingSpace: trim, fieldsPerRecord: fieldsPerRec, - lazyQuotes: lazyquote + lazyQuotes: lazyquote, } ); const expected = t.Output; assertEquals(actual, expected); } - } + }, }); } @@ -531,13 +531,13 @@ const parseTestCases = [ name: "simple", in: "a,b,c", header: false, - result: [["a", "b", "c"]] + result: [["a", "b", "c"]], }, { name: "simple Bufreader", in: new BufReader(new StringReader("a,b,c")), header: false, - result: [["a", "b", "c"]] + result: [["a", "b", "c"]], }, { name: "multiline", @@ -545,14 +545,14 @@ const parseTestCases = [ header: false, result: [ ["a", "b", "c"], - ["e", "f", "g"] - ] + ["e", "f", "g"], + ], }, { name: "header mapping boolean", in: "a,b,c\ne,f,g\n", header: true, - result: [{ a: "e", b: "f", c: "g" }] + result: [{ a: "e", b: "f", c: "g" }], }, { name: "header mapping array", @@ -560,8 +560,8 @@ const parseTestCases = [ header: ["this", "is", "sparta"], result: [ { this: "a", is: "b", sparta: "c" }, - { this: "e", is: "f", sparta: "g" } - ] + { this: "e", is: "f", sparta: "g" }, + ], }, { name: "header mapping object", @@ -569,8 +569,8 @@ const parseTestCases = [ header: [{ name: "this" }, { name: "is" }, { name: "sparta" }], result: [ { this: "a", is: "b", sparta: "c" }, - { this: "e", is: "f", sparta: "g" } - ] + { this: "e", is: "f", sparta: "g" }, + ], }, { name: "header mapping parse entry", @@ -580,25 +580,25 @@ const parseTestCases = [ name: "this", parse: (e: string): string => { return `b${e}$$`; - } + }, }, { name: "is", parse: (e: string): number => { return e.length; - } + }, }, { name: "sparta", parse: (e: string): unknown => { return { bim: `boom-${e}` }; - } - } + }, + }, ], result: [ { this: "ba$$", is: 1, sparta: { bim: `boom-c` } }, - { this: "be$$", is: 1, sparta: { bim: `boom-g` } } - ] + { this: "be$$", is: 1, sparta: { bim: `boom-g` } }, + ], }, { name: "multiline parse", @@ -609,8 +609,8 @@ const parseTestCases = [ header: false, result: [ { super: "a", street: "b", fighter: "c" }, - { super: "e", street: "f", fighter: "g" } - ] + { super: "e", street: "f", fighter: "g" }, + ], }, { name: "header mapping object parseline", @@ -621,9 +621,9 @@ const parseTestCases = [ }, result: [ { super: "a", street: "b", fighter: "c" }, - { super: "e", street: "f", fighter: "g" } - ] - } + { super: "e", street: "f", fighter: "g" }, + ], + }, ]; for (const testCase of parseTestCases) { @@ -632,9 +632,9 @@ for (const testCase of parseTestCases) { async fn(): Promise<void> { const r = await parse(testCase.in, { header: testCase.header, - parse: testCase.parse as (input: unknown) => unknown + parse: testCase.parse as (input: unknown) => unknown, }); assertEquals(r, testCase.result); - } + }, }); } diff --git a/std/encoding/hex_test.ts b/std/encoding/hex_test.ts index f98fe5422..56bdbf4f0 100644 --- a/std/encoding/hex_test.ts +++ b/std/encoding/hex_test.ts @@ -14,7 +14,7 @@ import { decode, decodeString, errLength, - errInvalidByte + errInvalidByte, } from "./hex.ts"; function toByte(s: string): number { @@ -29,7 +29,7 
@@ const testCases = [ ["f0f1f2f3f4f5f6f7", [0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7]], ["f8f9fafbfcfdfeff", [0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff]], ["67", Array.from(new TextEncoder().encode("g"))], - ["e3a1", [0xe3, 0xa1]] + ["e3a1", [0xe3, 0xa1]], ]; const errCases = [ @@ -42,7 +42,7 @@ const errCases = [ ["0g", "", errInvalidByte(new TextEncoder().encode("g")[0])], ["00gg", "\x00", errInvalidByte(new TextEncoder().encode("g")[0])], ["0\x01", "", errInvalidByte(new TextEncoder().encode("\x01")[0])], - ["ffeed", "\xff\xee", errLength()] + ["ffeed", "\xff\xee", errLength()], ]; Deno.test({ @@ -53,7 +53,7 @@ Deno.test({ assertEquals(encodedLen(2), 4); assertEquals(encodedLen(3), 6); assertEquals(encodedLen(4), 8); - } + }, }); Deno.test({ @@ -88,7 +88,7 @@ Deno.test({ assertEquals(dest.length, n); assertEquals(new TextDecoder().decode(dest), enc); } - } + }, }); Deno.test({ @@ -97,7 +97,7 @@ Deno.test({ for (const [enc, dec] of testCases) { assertEquals(encodeToString(new Uint8Array(dec as number[])), enc); } - } + }, }); Deno.test({ @@ -108,7 +108,7 @@ Deno.test({ assertEquals(decodedLen(4), 2); assertEquals(decodedLen(6), 3); assertEquals(decodedLen(8), 4); - } + }, }); Deno.test({ @@ -117,7 +117,7 @@ Deno.test({ // Case for decoding uppercase hex characters, since // Encode always uses lowercase. const extraTestcase = [ - ["F8F9FAFBFCFDFEFF", [0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff]] + ["F8F9FAFBFCFDFEFF", [0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff]], ]; const cases = testCases.concat(extraTestcase); @@ -129,7 +129,7 @@ Deno.test({ assertEquals(err, undefined); assertEquals(Array.from(dest), Array.from(dec as number[])); } - } + }, }); Deno.test({ @@ -140,7 +140,7 @@ Deno.test({ assertEquals(dec, Array.from(dst)); } - } + }, }); Deno.test({ @@ -155,7 +155,7 @@ Deno.test({ ); assertEquals(err, expectedErr); } - } + }, }); Deno.test({ @@ -175,5 +175,5 @@ Deno.test({ assertEquals(new TextDecoder("ascii").decode(out), output as string); } } - } + }, }); diff --git a/std/encoding/mod.ts b/std/encoding/mod.ts index d63cf47f3..eaa28ae27 100644 --- a/std/encoding/mod.ts +++ b/std/encoding/mod.ts @@ -2,13 +2,13 @@ export { HeaderOptions as CsvHeaderOptions, ParseError as CsvParseError, ParseOptions as ParseCsvOptions, - parse as parseCsv + parse as parseCsv, } from "./csv.ts"; export { decode as decodeHex, decodeString as decodeHexString, encode as encodeToHex, - encodeToString as encodeToHexString + encodeToString as encodeToHexString, } from "./hex.ts"; export { parse as parseToml, stringify as tomlStringify } from "./toml.ts"; export { parse as parseYaml, stringify as yamlStringify } from "./yaml.ts"; diff --git a/std/encoding/toml_test.ts b/std/encoding/toml_test.ts index 425b8a22c..d272a29ff 100644 --- a/std/encoding/toml_test.ts +++ b/std/encoding/toml_test.ts @@ -29,12 +29,12 @@ Deno.test({ str6: "The quick brown\nfox jumps over\nthe lazy dog.", lines: "The first newline is\ntrimmed in raw strings.\n All other " + - "whitespace\n is preserved." 
- } + "whitespace\n is preserved.", + }, }; const actual = parseFile(path.join(testFilesDir, "string.toml")); assertEquals(actual, expected); - } + }, }); Deno.test({ @@ -43,7 +43,7 @@ Deno.test({ const expected = { boolean: { bool1: true, bool2: false } }; const actual = parseFile(path.join(testFilesDir, "CRLF.toml")); assertEquals(actual, expected); - } + }, }); Deno.test({ @@ -52,7 +52,7 @@ Deno.test({ const expected = { boolean: { bool1: true, bool2: false } }; const actual = parseFile(path.join(testFilesDir, "boolean.toml")); assertEquals(actual, expected); - } + }, }); Deno.test({ @@ -72,12 +72,12 @@ Deno.test({ hex3: "0xdead_beef", oct1: "0o01234567", oct2: "0o755", - bin1: "0b11010110" - } + bin1: "0b11010110", + }, }; const actual = parseFile(path.join(testFilesDir, "integer.toml")); assertEquals(actual, expected); - } + }, }); Deno.test({ @@ -98,12 +98,12 @@ Deno.test({ sf3: -Infinity, sf4: NaN, sf5: NaN, - sf6: NaN - } + sf6: NaN, + }, }; const actual = parseFile(path.join(testFilesDir, "float.toml")); assertEquals(actual, expected); - } + }, }); Deno.test({ @@ -113,14 +113,14 @@ Deno.test({ arrays: { data: [ ["gamma", "delta"], - [1, 2] + [1, 2], ], - hosts: ["alpha", "omega"] - } + hosts: ["alpha", "omega"], + }, }; const actual = parseFile(path.join(testFilesDir, "arrays.toml")); assertEquals(actual, expected); - } + }, }); Deno.test({ @@ -133,27 +133,27 @@ Deno.test({ in: { the: { toml: { - name: "Tom Preston-Werner" - } - } - } - } - } + name: "Tom Preston-Werner", + }, + }, + }, + }, + }, }, servers: { alpha: { ip: "10.0.0.1", - dc: "eqdc10" + dc: "eqdc10", }, beta: { ip: "10.0.0.2", - dc: "eqdc20" - } - } + dc: "eqdc20", + }, + }, }; const actual = parseFile(path.join(testFilesDir, "table.toml")); assertEquals(actual, expected); - } + }, }); Deno.test({ @@ -164,11 +164,11 @@ Deno.test({ not: "[node]", regex: "<ic*s*>", NANI: "何?!", - comment: "Comment inside # the comment" + comment: "Comment inside # the comment", }; const actual = parseFile(path.join(testFilesDir, "simple.toml")); assertEquals(actual, expected); - } + }, }); Deno.test({ @@ -182,12 +182,12 @@ Deno.test({ odt4: new Date("1979-05-27 07:32:00Z"), ld1: new Date("1979-05-27"), lt1: "07:32:00", - lt2: "00:32:00.999999" - } + lt2: "00:32:00.999999", + }, }; const actual = parseFile(path.join(testFilesDir, "datetime.toml")); assertEquals(actual, expected); - } + }, }); Deno.test({ @@ -200,39 +200,39 @@ Deno.test({ malevolant: { creation: { drum: { - kit: "Tama" - } - } - } + kit: "Tama", + }, + }, + }, }, derek: { - roddy: "drummer" - } + roddy: "drummer", + }, }, name: { first: "Tom", - last: "Preston-Werner" + last: "Preston-Werner", }, point: { x: 1, - y: 2 + y: 2, }, dog: { type: { - name: "pug" - } + name: "pug", + }, }, "tosin.abasi": "guitarist", animal: { as: { - leaders: "tosin" - } - } - } + leaders: "tosin", + }, + }, + }, }; const actual = parseFile(path.join(testFilesDir, "inlineTable.toml")); assertEquals(actual, expected); - } + }, }); Deno.test({ @@ -241,13 +241,13 @@ Deno.test({ const expected = { bin: [ { name: "deno", path: "cli/main.rs" }, - { name: "deno_core", path: "src/foo.rs" } + { name: "deno_core", path: "src/foo.rs" }, ], - nib: [{ name: "node", path: "not_found" }] + nib: [{ name: "node", path: "not_found" }], }; const actual = parseFile(path.join(testFilesDir, "arrayTable.toml")); assertEquals(actual, expected); - } + }, }); Deno.test({ @@ -286,14 +286,14 @@ Deno.test({ "tokio-io": "0.1.11", "tokio-process": "0.2.3", "tokio-threadpool": "0.1.11", - url: "1.7.2" + url: "1.7.2", }, - 
target: { "cfg(windows)": { dependencies: { winapi: "0.3.6" } } } + target: { "cfg(windows)": { dependencies: { winapi: "0.3.6" } } }, }; /* eslint-enable @typescript-eslint/camelcase */ const actual = parseFile(path.join(testFilesDir, "cargo.toml")); assertEquals(actual, expected); - } + }, }); Deno.test({ @@ -303,15 +303,15 @@ Deno.test({ foo: { bar: "deno" }, this: { is: { nested: "denonono" } }, "https://deno.land/std": { - $: "doller" + $: "doller", }, "##": { deno: { "https://deno.land": { proto: "https", - ":80": "port" - } - } + ":80": "port", + }, + }, }, arrayObjects: [{ stuff: "in" }, {}, { the: "array" }], deno: "is", @@ -347,9 +347,9 @@ Deno.test({ sf6: NaN, data: [ ["gamma", "delta"], - [1, 2] + [1, 2], ], - hosts: ["alpha", "omega"] + hosts: ["alpha", "omega"], }; const expected = `deno = "is" not = "[node]" @@ -408,5 +408,5 @@ the = "array" `; const actual = stringify(src); assertEquals(actual, expected); - } + }, }); diff --git a/std/encoding/yaml.ts b/std/encoding/yaml.ts index a8784319b..76b1b8379 100644 --- a/std/encoding/yaml.ts +++ b/std/encoding/yaml.ts @@ -6,6 +6,6 @@ export { ParseOptions, parse, parseAll } from "./yaml/parse.ts"; export { DumpOptions as StringifyOptions, - stringify + stringify, } from "./yaml/stringify.ts"; export * from "./yaml/schema/mod.ts"; diff --git a/std/encoding/yaml/dumper/dumper.ts b/std/encoding/yaml/dumper/dumper.ts index 3a34e14cc..1280ee757 100644 --- a/std/encoding/yaml/dumper/dumper.ts +++ b/std/encoding/yaml/dumper/dumper.ts @@ -73,7 +73,7 @@ const DEPRECATED_BOOLEANS_SYNTAX = [ "NO", "off", "Off", - "OFF" + "OFF", ]; function encodeHex(character: number): string { diff --git a/std/encoding/yaml/dumper/dumper_state.ts b/std/encoding/yaml/dumper/dumper_state.ts index 88164a0d2..94cd84878 100644 --- a/std/encoding/yaml/dumper/dumper_state.ts +++ b/std/encoding/yaml/dumper/dumper_state.ts @@ -121,7 +121,7 @@ export class DumperState extends State { lineWidth = 80, noRefs = false, noCompatMode = false, - condenseFlow = false + condenseFlow = false, }: DumperStateOptions) { super(schema); this.indent = Math.max(1, indent); diff --git a/std/encoding/yaml/example/dump.ts b/std/encoding/yaml/example/dump.ts index 746c3be01..db3647274 100644 --- a/std/encoding/yaml/example/dump.ts +++ b/std/encoding/yaml/example/dump.ts @@ -10,13 +10,13 @@ console.log( "a", "b", { - a: false + a: false, }, { - a: false - } - ] + a: false, + }, + ], }, - test: "foobar" + test: "foobar", }) ); diff --git a/std/encoding/yaml/example/inout.ts b/std/encoding/yaml/example/inout.ts index 80cad8258..b0b47e3fe 100644 --- a/std/encoding/yaml/example/inout.ts +++ b/std/encoding/yaml/example/inout.ts @@ -9,14 +9,14 @@ const test = { "a", "b", { - a: false + a: false, }, { - a: false - } - ] + a: false, + }, + ], }, - test: "foobar" + test: "foobar", }; const string = stringify(test); diff --git a/std/encoding/yaml/loader/loader.ts b/std/encoding/yaml/loader/loader.ts index 1ab4fc7f5..f0d535624 100644 --- a/std/encoding/yaml/loader/loader.ts +++ b/std/encoding/yaml/loader/loader.ts @@ -37,11 +37,11 @@ function _class(obj: unknown): string { } function isEOL(c: number): boolean { - return c === 0x0a /* LF */ || c === 0x0d /* CR */; + return c === 0x0a || /* LF */ c === 0x0d /* CR */; } function isWhiteSpace(c: number): boolean { - return c === 0x09 /* Tab */ || c === 0x20 /* Space */; + return c === 0x09 || /* Tab */ c === 0x20 /* Space */; } function isWsOrEol(c: number): boolean { @@ -64,13 +64,13 @@ function isFlowIndicator(c: number): boolean { } function 
fromHexCode(c: number): number { - if (0x30 /* 0 */ <= c && c <= 0x39 /* 9 */) { + if (0x30 <= /* 0 */ c && c <= 0x39 /* 9 */) { return c - 0x30; } const lc = c | 0x20; - if (0x61 /* a */ <= lc && lc <= 0x66 /* f */) { + if (0x61 <= /* a */ lc && lc <= 0x66 /* f */) { return lc - 0x61 + 10; } @@ -91,7 +91,7 @@ function escapedHexLen(c: number): number { } function fromDecimalCode(c: number): number { - if (0x30 /* 0 */ <= c && c <= 0x39 /* 9 */) { + if (0x30 <= /* 0 */ c && c <= 0x39 /* 9 */) { return c - 0x30; } @@ -251,7 +251,7 @@ const directiveHandlers: DirectiveHandlers = { state.tagMap = {}; } state.tagMap[handle] = prefix; - } + }, }; function captureSegment( @@ -414,7 +414,7 @@ function skipSeparationSpace( if (allowComments && ch === 0x23 /* # */) { do { ch = state.input.charCodeAt(++state.position); - } while (ch !== 0x0a /* LF */ && ch !== 0x0d /* CR */ && ch !== 0); + } while (ch !== 0x0a && /* LF */ ch !== 0x0d && /* CR */ ch !== 0); } if (isEOL(ch)) { @@ -451,7 +451,7 @@ function testDocumentSeparator(state: LoaderState): boolean { // Condition state.position === state.lineStart is tested // in parent on each call, for efficiency. No needs to test here again. if ( - (ch === 0x2d /* - */ || ch === 0x2e) /* . */ && + (ch === 0x2d || /* - */ ch === 0x2e) /* . */ && ch === state.input.charCodeAt(_position + 1) && ch === state.input.charCodeAt(_position + 2) ) { @@ -503,7 +503,7 @@ function readPlainScalar( } let following: number; - if (ch === 0x3f /* ? */ || ch === 0x2d /* - */) { + if (ch === 0x3f || /* ? */ ch === 0x2d /* - */) { following = state.input.charCodeAt(state.position + 1); if ( @@ -869,7 +869,7 @@ function readBlockScalar(state: LoaderState, nodeIndent: number): boolean { while (ch !== 0) { ch = state.input.charCodeAt(++state.position); - if (ch === 0x2b /* + */ || ch === 0x2d /* - */) { + if (ch === 0x2b || /* + */ ch === 0x2d /* - */) { if (CHOMPING_CLIP === chomping) { chomping = ch === 0x2b /* + */ ? CHOMPING_KEEP : CHOMPING_STRIP; } else { @@ -1103,7 +1103,7 @@ function readBlockMapping( // Explicit notation case. There are two separate blocks: // first for the key (denoted by "?") and second for the value (denoted by ":") // - if ((ch === 0x3f /* ? */ || ch === 0x3a) /* : */ && isWsOrEol(following)) { + if ((ch === 0x3f || /* ? */ ch === 0x3a) && /* : */ isWsOrEol(following)) { if (ch === 0x3f /* ? 
*/) { if (atExplicitKey) { storeMappingPair( diff --git a/std/encoding/yaml/loader/loader_state.ts b/std/encoding/yaml/loader/loader_state.ts index 1e136025c..ca50fcaf1 100644 --- a/std/encoding/yaml/loader/loader_state.ts +++ b/std/encoding/yaml/loader/loader_state.ts @@ -56,7 +56,7 @@ export class LoaderState extends State { onWarning, legacy = false, json = false, - listener = null + listener = null, }: LoaderStateOptions ) { super(schema); diff --git a/std/encoding/yaml/parse_test.ts b/std/encoding/yaml/parse_test.ts index bdcd8db62..21f1b893b 100644 --- a/std/encoding/yaml/parse_test.ts +++ b/std/encoding/yaml/parse_test.ts @@ -20,7 +20,7 @@ Deno.test({ const expected = { test: "toto", foo: { bar: true, baz: 1, qux: null } }; assertEquals(parse(yaml), expected); - } + }, }); Deno.test({ @@ -40,17 +40,17 @@ name: Eve const expected = [ { id: 1, - name: "Alice" + name: "Alice", }, { id: 2, - name: "Bob" + name: "Bob", }, { id: 3, - name: "Eve" - } + name: "Eve", + }, ]; assertEquals(parseAll(yaml), expected); - } + }, }); diff --git a/std/encoding/yaml/schema.ts b/std/encoding/yaml/schema.ts index 1968e34c1..579644dbb 100644 --- a/std/encoding/yaml/schema.ts +++ b/std/encoding/yaml/schema.ts @@ -45,7 +45,7 @@ function compileMap(...typesList: Type[][]): TypeMap { fallback: {}, mapping: {}, scalar: {}, - sequence: {} + sequence: {}, }; for (const types of typesList) { diff --git a/std/encoding/yaml/schema/core.ts b/std/encoding/yaml/schema/core.ts index 82a512a1e..4fadc9bfe 100644 --- a/std/encoding/yaml/schema/core.ts +++ b/std/encoding/yaml/schema/core.ts @@ -9,5 +9,5 @@ import { json } from "./json.ts"; // Standard YAML's Core schema. // http://www.yaml.org/spec/1.2/spec.html#id2804923 export const core = new Schema({ - include: [json] + include: [json], }); diff --git a/std/encoding/yaml/schema/default.ts b/std/encoding/yaml/schema/default.ts index 0fe1dbf12..4c5ceeba7 100644 --- a/std/encoding/yaml/schema/default.ts +++ b/std/encoding/yaml/schema/default.ts @@ -12,5 +12,5 @@ import { core } from "./core.ts"; export const def = new Schema({ explicit: [binary, omap, pairs, set], implicit: [timestamp, merge], - include: [core] + include: [core], }); diff --git a/std/encoding/yaml/schema/failsafe.ts b/std/encoding/yaml/schema/failsafe.ts index 0fbb74ca9..74e1897be 100644 --- a/std/encoding/yaml/schema/failsafe.ts +++ b/std/encoding/yaml/schema/failsafe.ts @@ -9,5 +9,5 @@ import { map, seq, str } from "../type/mod.ts"; // Standard YAML's Failsafe schema. 
// http://www.yaml.org/spec/1.2/spec.html#id2802346 export const failsafe = new Schema({ - explicit: [str, seq, map] + explicit: [str, seq, map], }); diff --git a/std/encoding/yaml/schema/json.ts b/std/encoding/yaml/schema/json.ts index dae469f35..c30166fdf 100644 --- a/std/encoding/yaml/schema/json.ts +++ b/std/encoding/yaml/schema/json.ts @@ -11,5 +11,5 @@ import { failsafe } from "./failsafe.ts"; // http://www.yaml.org/spec/1.2/spec.html#id2803231 export const json = new Schema({ implicit: [nil, bool, int, float], - include: [failsafe] + include: [failsafe], }); diff --git a/std/encoding/yaml/stringify_test.ts b/std/encoding/yaml/stringify_test.ts index 941beb789..03a3090d9 100644 --- a/std/encoding/yaml/stringify_test.ts +++ b/std/encoding/yaml/stringify_test.ts @@ -16,14 +16,14 @@ Deno.test({ "a", "b", { - a: false + a: false, }, { - a: false - } - ] + a: false, + }, + ], }, - test: "foobar" + test: "foobar", }; const ASSERTS = `foo: @@ -37,5 +37,5 @@ test: foobar `; assertEquals(stringify(FIXTURE), ASSERTS); - } + }, }); diff --git a/std/encoding/yaml/type/binary.ts b/std/encoding/yaml/type/binary.ts index 8cfe54f79..f4823b3f7 100644 --- a/std/encoding/yaml/type/binary.ts +++ b/std/encoding/yaml/type/binary.ts @@ -135,5 +135,5 @@ export const binary = new Type("tag:yaml.org,2002:binary", { kind: "scalar", predicate: isBinary, represent: representYamlBinary, - resolve: resolveYamlBinary + resolve: resolveYamlBinary, }); diff --git a/std/encoding/yaml/type/bool.ts b/std/encoding/yaml/type/bool.ts index e39823872..a5a85cf9e 100644 --- a/std/encoding/yaml/type/bool.ts +++ b/std/encoding/yaml/type/bool.ts @@ -33,7 +33,7 @@ export const bool = new Type("tag:yaml.org,2002:bool", { }, camelcase(object: boolean): string { return object ? "True" : "False"; - } + }, }, - resolve: resolveYamlBoolean + resolve: resolveYamlBoolean, }); diff --git a/std/encoding/yaml/type/float.ts b/std/encoding/yaml/type/float.ts index acb12f5b0..5ae0689b2 100644 --- a/std/encoding/yaml/type/float.ts +++ b/std/encoding/yaml/type/float.ts @@ -121,5 +121,5 @@ export const float = new Type("tag:yaml.org,2002:float", { kind: "scalar", predicate: isFloat, represent: representYamlFloat, - resolve: resolveYamlFloat + resolve: resolveYamlFloat, }); diff --git a/std/encoding/yaml/type/int.ts b/std/encoding/yaml/type/int.ts index 93ec8260e..6a86aafe9 100644 --- a/std/encoding/yaml/type/int.ts +++ b/std/encoding/yaml/type/int.ts @@ -8,18 +8,18 @@ import { isNegativeZero, Any } from "../utils.ts"; function isHexCode(c: number): boolean { return ( - (0x30 /* 0 */ <= c && c <= 0x39) /* 9 */ || - (0x41 /* A */ <= c && c <= 0x46) /* F */ || - (0x61 /* a */ <= c && c <= 0x66) /* f */ + (0x30 <= /* 0 */ c && c <= 0x39) /* 9 */ || + (0x41 <= /* A */ c && c <= 0x46) /* F */ || + (0x61 <= /* a */ c && c <= 0x66) /* f */ ); } function isOctCode(c: number): boolean { - return 0x30 /* 0 */ <= c && c <= 0x37 /* 7 */; + return 0x30 <= /* 0 */ c && c <= 0x37 /* 7 */; } function isDecCode(c: number): boolean { - return 0x30 /* 0 */ <= c && c <= 0x39 /* 9 */; + return 0x30 <= /* 0 */ c && c <= 0x39 /* 9 */; } function resolveYamlInteger(data: string): boolean { @@ -175,17 +175,14 @@ export const int = new Type("tag:yaml.org,2002:int", { hexadecimal(obj: number): string { return obj >= 0 ? 
`0x${obj.toString(16).toUpperCase()}` - : `-0x${obj - .toString(16) - .toUpperCase() - .slice(1)}`; - } + : `-0x${obj.toString(16).toUpperCase().slice(1)}`; + }, }, resolve: resolveYamlInteger, styleAliases: { binary: [2, "bin"], decimal: [10, "dec"], hexadecimal: [16, "hex"], - octal: [8, "oct"] - } + octal: [8, "oct"], + }, }); diff --git a/std/encoding/yaml/type/map.ts b/std/encoding/yaml/type/map.ts index 60e678657..dcd99abca 100644 --- a/std/encoding/yaml/type/map.ts +++ b/std/encoding/yaml/type/map.ts @@ -10,5 +10,5 @@ export const map = new Type("tag:yaml.org,2002:map", { construct(data): Any { return data !== null ? data : {}; }, - kind: "mapping" + kind: "mapping", }); diff --git a/std/encoding/yaml/type/merge.ts b/std/encoding/yaml/type/merge.ts index 77b34025b..68314bf2e 100644 --- a/std/encoding/yaml/type/merge.ts +++ b/std/encoding/yaml/type/merge.ts @@ -11,5 +11,5 @@ function resolveYamlMerge(data: string): boolean { export const merge = new Type("tag:yaml.org,2002:merge", { kind: "scalar", - resolve: resolveYamlMerge + resolve: resolveYamlMerge, }); diff --git a/std/encoding/yaml/type/nil.ts b/std/encoding/yaml/type/nil.ts index 00627514c..8a48d02fb 100644 --- a/std/encoding/yaml/type/nil.ts +++ b/std/encoding/yaml/type/nil.ts @@ -39,7 +39,7 @@ export const nil = new Type("tag:yaml.org,2002:null", { }, camelcase(): string { return "Null"; - } + }, }, - resolve: resolveYamlNull + resolve: resolveYamlNull, }); diff --git a/std/encoding/yaml/type/omap.ts b/std/encoding/yaml/type/omap.ts index 541e31df6..d6d751505 100644 --- a/std/encoding/yaml/type/omap.ts +++ b/std/encoding/yaml/type/omap.ts @@ -42,5 +42,5 @@ function constructYamlOmap(data: Any): Any { export const omap = new Type("tag:yaml.org,2002:omap", { construct: constructYamlOmap, kind: "sequence", - resolve: resolveYamlOmap + resolve: resolveYamlOmap, }); diff --git a/std/encoding/yaml/type/pairs.ts b/std/encoding/yaml/type/pairs.ts index c964524b5..e999748ae 100644 --- a/std/encoding/yaml/type/pairs.ts +++ b/std/encoding/yaml/type/pairs.ts @@ -45,5 +45,5 @@ function constructYamlPairs(data: string): Any[] { export const pairs = new Type("tag:yaml.org,2002:pairs", { construct: constructYamlPairs, kind: "sequence", - resolve: resolveYamlPairs + resolve: resolveYamlPairs, }); diff --git a/std/encoding/yaml/type/seq.ts b/std/encoding/yaml/type/seq.ts index bd7ceb945..b19565dbc 100644 --- a/std/encoding/yaml/type/seq.ts +++ b/std/encoding/yaml/type/seq.ts @@ -10,5 +10,5 @@ export const seq = new Type("tag:yaml.org,2002:seq", { construct(data): Any { return data !== null ? data : []; }, - kind: "sequence" + kind: "sequence", }); diff --git a/std/encoding/yaml/type/set.ts b/std/encoding/yaml/type/set.ts index 3b7fca0e9..0bfe1c8db 100644 --- a/std/encoding/yaml/type/set.ts +++ b/std/encoding/yaml/type/set.ts @@ -27,5 +27,5 @@ function constructYamlSet(data: string): Any { export const set = new Type("tag:yaml.org,2002:set", { construct: constructYamlSet, kind: "mapping", - resolve: resolveYamlSet + resolve: resolveYamlSet, }); diff --git a/std/encoding/yaml/type/str.ts b/std/encoding/yaml/type/str.ts index c7227743e..cd6e9430f 100644 --- a/std/encoding/yaml/type/str.ts +++ b/std/encoding/yaml/type/str.ts @@ -8,5 +8,5 @@ export const str = new Type("tag:yaml.org,2002:str", { construct(data): string { return data !== null ? 
data : ""; }, - kind: "scalar" + kind: "scalar", }); diff --git a/std/encoding/yaml/type/timestamp.ts b/std/encoding/yaml/type/timestamp.ts index 14d24077a..eb03b3825 100644 --- a/std/encoding/yaml/type/timestamp.ts +++ b/std/encoding/yaml/type/timestamp.ts @@ -92,5 +92,5 @@ export const timestamp = new Type("tag:yaml.org,2002:timestamp", { instanceOf: Date, kind: "scalar", represent: representYamlTimestamp, - resolve: resolveYamlTimestamp + resolve: resolveYamlTimestamp, }); |