Skip to content

Commit

Permalink
Covered text_builder utils test cases
Browse files Browse the repository at this point in the history
  • Loading branch information
EthanThatOneKid committed Dec 8, 2021
1 parent 7b3f2e0 commit b2bd98d
Show file tree
Hide file tree
Showing 7 changed files with 201 additions and 11 deletions.
12 changes: 6 additions & 6 deletions lib/text_builder/text_builder.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import {
CartridgeEvent,
PropertyDefinition,
} from "../cartridge/mod.ts";
import { Lexicon, Token } from "../tokenize/mod.ts";
import type { Token } from "../tokenize/mod.ts";
import {
makeFileEndEventContext,
makeFileStartEventContext,
Expand Down Expand Up @@ -35,12 +35,12 @@ export class TextBuilder {
): Promise<void>;
public async append(
event: CartridgeEvent.InlineComment,
tokens: Token[],
tokens: [Token],
comments: Token[],
): Promise<void>;
public async append(
event: CartridgeEvent.MultilineComment,
tokens: Token[],
tokens: [Token],
comments: Token[],
): Promise<void>;
public async append(
Expand All @@ -63,7 +63,7 @@ export class TextBuilder {
): Promise<void>;
public async append(
event: CartridgeEvent.StructClose,
tokens: Token[],
tokens: [Token],
comments: Token[],
): Promise<void>;
public async append(
Expand All @@ -90,14 +90,14 @@ export class TextBuilder {
case CartridgeEvent.InlineComment: {
code = await this.cartridge.dispatch(
CartridgeEvent.InlineComment,
makeInlineCommentEventContext(this.currentBlock, tokens, comments),
makeInlineCommentEventContext(this.currentBlock, tokens),
);
break;
}
case CartridgeEvent.MultilineComment: {
code = await this.cartridge.dispatch(
CartridgeEvent.MultilineComment,
makeMultilineCommentEventContext(this.currentBlock, tokens, comments),
makeMultilineCommentEventContext(this.currentBlock, tokens),
);
break;
}
Expand Down
158 changes: 157 additions & 1 deletion lib/text_builder/utils.test.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import { assertEquals } from "../../deps/std/testing.ts";
import { T } from "../tokenize/mod.ts";
import { T, Token } from "../tokenize/mod.ts";
import { CodeBlock } from "../code_block/mod.ts";
import {
cleanComment,
makeFileEndEventContext,
Expand All @@ -11,6 +12,11 @@ import {
makeStructCloseEventContext,
makeStructOpenEventContext,
} from "./utils.ts";
import { CartridgeEvent } from "../cartridge/mod.ts";
import type {
CartridgeEventContext,
PropertyDefinition,
} from "../cartridge/mod.ts";

Deno.test("cleans inlined comments to extract text content", () => {
const expectation = ["example"];
Expand All @@ -35,3 +41,153 @@ Deno.test("cleans multi-inlined comments to extract text content (omits whitespa
));
assertEquals(expectation, reality);
});

// Verifies that makeFileEndEventContext builds a 'file_end' context carrying
// the given code block, an empty token list, and null data.
Deno.test("successfully makes a 'file_end' event context object", () => {
  const block = new CodeBlock();
  const noTokens: Token[] = [];
  const want: CartridgeEventContext<CartridgeEvent.FileEnd> = {
    type: CartridgeEvent.FileEnd,
    code: block,
    data: null,
    tokens: noTokens,
  };
  const got = makeFileEndEventContext(block, noTokens);
  assertEquals(want, got);
});

// Verifies that makeFileStartEventContext builds a 'file_start' context
// carrying the given code block, an empty token list, and null data.
Deno.test("successfully makes a 'file_start' event context object", () => {
  const block = new CodeBlock();
  const noTokens: Token[] = [];
  const want: CartridgeEventContext<CartridgeEvent.FileStart> = {
    type: CartridgeEvent.FileStart,
    code: block,
    data: null,
    tokens: noTokens,
  };
  const got = makeFileStartEventContext(block, noTokens);
  assertEquals(want, got);
});

// Verifies that makeInlineCommentEventContext extracts the cleaned comment
// text ("example") from a single comment token into the context's data.
Deno.test("successfully makes a 'inline_comment' event context object", () => {
  const block = new CodeBlock();
  const commentTokens: Token[] = [T.comment("; example", 1, 1)];
  const want: CartridgeEventContext<CartridgeEvent.InlineComment> = {
    type: CartridgeEvent.InlineComment,
    code: block,
    data: { comments: ["example"] },
    tokens: commentTokens,
  };
  const got = makeInlineCommentEventContext(block, commentTokens);
  assertEquals(want, got);
});

// Verifies that makeMultilineCommentEventContext extracts the cleaned comment
// text ("example") from a single comment token into the context's data.
Deno.test("successfully makes a 'multiline_comment' event context object", () => {
  const block = new CodeBlock();
  const commentTokens: Token[] = [T.comment("; example", 1, 1)];
  const want: CartridgeEventContext<CartridgeEvent.MultilineComment> = {
    type: CartridgeEvent.MultilineComment,
    code: block,
    tokens: commentTokens,
    data: { comments: ["example"] },
  };
  const got = makeMultilineCommentEventContext(block, commentTokens);
  assertEquals(want, got);
});

// Verifies that makeLoadEventContext carries the load source path, the
// dependency names, and an empty comment list through to the context's data.
Deno.test("successfully makes a 'load' event context object", () => {
  const block = new CodeBlock();
  const source = "./example.fart";
  const dependencies = ["Example1", "Example2", "Example3"];
  // Token stream for: load "./example.fart" { Example1, Example2, Example3, }
  const loadTokens: Token[] = [
    T.load(1, 1),
    T.text_1(source, 1, 6),
    T.nest(1, 23),
    T.id("Example1", 2, 3),
    T.separator(2, 11),
    T.id("Example2", 3, 3),
    T.separator(3, 11),
    T.id("Example3", 4, 3),
    T.separator(4, 11),
    T.denest(5, 1),
  ];
  const want: CartridgeEventContext<CartridgeEvent.Load> = {
    type: CartridgeEvent.Load,
    code: block,
    tokens: loadTokens,
    data: { source, dependencies, comments: [] },
  };
  const got = makeLoadEventContext(
    block,
    loadTokens,
    /*comments=*/ [],
    source,
    dependencies,
  );
  assertEquals(want, got);
});

// Verifies that makeSetPropertyEventContext carries the property name, its
// definition, and an empty comment list through to the context's data.
Deno.test("successfully makes a 'set_property' event context object", () => {
  const block = new CodeBlock();
  const name = "property";
  const definition: PropertyDefinition = { value: "number" };
  // Token stream for a `property: number` assignment.
  const propertyTokens: Token[] = [
    T.id(name, 2, 3),
    T.setter_1(2, 11),
    T.id("number", 2, 13),
  ];
  const want: CartridgeEventContext<CartridgeEvent.SetProperty> = {
    type: CartridgeEvent.SetProperty,
    code: block,
    tokens: propertyTokens,
    data: { name, definition, comments: [] },
  };
  const got = makeSetPropertyEventContext(
    block,
    propertyTokens,
    /*comments=*/ [],
    name,
    definition,
  );
  assertEquals(want, got);
});

// Verifies that makeStructCloseEventContext builds a 'struct_close' context
// with null data for the given code block and (empty) token list.
Deno.test("successfully makes a 'struct_close' event context object", () => {
  const block = new CodeBlock();
  const noTokens: Token[] = [];
  const want: CartridgeEventContext<CartridgeEvent.StructClose> = {
    type: CartridgeEvent.StructClose,
    code: block,
    tokens: noTokens,
    data: null,
  };
  assertEquals(want, makeStructCloseEventContext(block, noTokens));
});

// Verifies that makeStructOpenEventContext carries the struct name and an
// empty comment list through to the context's data.
Deno.test("successfully makes a 'struct_open' event context object", () => {
  const block = new CodeBlock();
  // Token stream for: type Example {
  const structTokens: Token[] = [
    T.type(1, 1),
    T.id("Example", 1, 6),
    T.nest(1, 14),
  ];
  const name = "Example";
  const want: CartridgeEventContext<CartridgeEvent.StructOpen> = {
    type: CartridgeEvent.StructOpen,
    code: block,
    tokens: structTokens,
    data: { name, comments: [] },
  };
  const got = makeStructOpenEventContext(
    block,
    structTokens,
    /*comments=*/ [],
    name,
  );
  assertEquals(want, got);
});
6 changes: 2 additions & 4 deletions lib/text_builder/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -50,23 +50,21 @@ export const makeFileStartEventContext = (
export const makeInlineCommentEventContext = (
code: CodeBlock,
tokens: Token[],
comments: Token[],
): CartridgeEventContext<CartridgeEvent.InlineComment> => ({
type: CartridgeEvent.InlineComment,
code,
tokens,
data: { comments: comments.flatMap(cleanComment) },
data: { comments: tokens.flatMap(cleanComment) },
});

export const makeMultilineCommentEventContext = (
code: CodeBlock,
tokens: Token[],
comments: Token[],
): CartridgeEventContext<CartridgeEvent.MultilineComment> => ({
type: CartridgeEvent.MultilineComment,
code,
tokens,
data: { comments: comments.flatMap(cleanComment) },
data: { comments: tokens.flatMap(cleanComment) },
});

export const makeLoadEventContext = (
Expand Down
2 changes: 2 additions & 0 deletions lib/tokenize/lexicon.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
export enum Lexicon {
Identifier,
Load,
StructOpener,
StructCloser,
TupleOpener,
Expand All @@ -26,6 +27,7 @@ export const LEXICON: LexiconType = new Map<
string | string[] | null
>([
[Lexicon.Identifier, null],
[Lexicon.Load, "load"],
[Lexicon.StructOpener, "{"],
[Lexicon.StructCloser, "}"],
[Lexicon.TupleOpener, "("],
Expand Down
4 changes: 4 additions & 0 deletions lib/tokenize/t.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ type SpecialTokenMaker = (raw: string, line: number, col: number) => Token;
export interface LexiconAliasLayer {
/** `___` — identifier */
id: SpecialTokenMaker;
/** `load` — struct opener */
load: SimpleTokenMaker;
/** `{` — struct opener */
nest: SimpleTokenMaker;
/** `}` — struct closer */
Expand Down Expand Up @@ -52,6 +54,7 @@ export interface LexiconAliasLayer {
const makeSpecialToken: SpecialTokenMaker = (raw, line, col) =>
new Token(raw, line, col);

const LOAD = LEXICON.get(Lexicon.Load) as string;
const NEST = LEXICON.get(Lexicon.StructOpener) as string;
const DENEST = LEXICON.get(Lexicon.StructCloser) as string;
const OPEN_TUPLE = LEXICON.get(Lexicon.TupleOpener) as string;
Expand All @@ -65,6 +68,7 @@ const SEPARATOR = LEXICON.get(Lexicon.Separator) as string;

export const T: LexiconAliasLayer = {
id: makeSpecialToken,
load: (line, col) => new Token(LOAD, line, col),
nest: (line, col) => new Token(NEST, line, col),
denest: (line, col) => new Token(DENEST, line, col),
open_tuple: (line, col) => new Token(OPEN_TUPLE, line, col),
Expand Down
8 changes: 8 additions & 0 deletions lib/tokenize/token.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,11 @@
// TODO(@ethandavidson): test `getKindOf` method
// TODO(@ethandavidson): test `toString` method
// TODO(@ethandavidson): test `value` method

import { assertEquals } from "../../deps/std/testing.ts";
import { Token } from "./token.ts";
import { Lexicon } from "./lexicon.ts";

// Verifies that constructing a Token from the empty string does not throw
// and that its kind falls back to Lexicon.Unknown.
Deno.test("creates a token with an empty string without crashing", () => {
  const emptyToken = new Token("");
  assertEquals(emptyToken.kind, Lexicon.Unknown);
});
22 changes: 22 additions & 0 deletions lib/tokenize/tokenize.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -309,3 +309,25 @@ spec Example
const reality = [...tokenize(input)];
assertEquals(expectation, reality);
});

// Verifies that `tokenize` yields the expected token sequence for a
// well-formed `load` statement importing three identifiers.
// NOTE(review): the expected token columns (e.g. T.id("Example1", 2, 3))
// imply the template literal's inner lines are indented by two spaces; that
// indentation appears to have been stripped from this capture — confirm the
// literal's whitespace against the original file before trusting columns.
Deno.test("yields tokens of proper `load` statement", () => {
const input = `load "./example.fart" {
Example1,
Example2,
Example3,
}`;
// Expected stream: `load` keyword, quoted source path, opening brace, then
// an identifier/separator pair per dependency, and the closing brace.
const expectation = [
T.load(1, 1),
T.text_1("./example.fart", 1, 6),
T.nest(1, 23),
T.id("Example1", 2, 3),
T.separator(2, 11),
T.id("Example2", 3, 3),
T.separator(3, 11),
T.id("Example3", 4, 3),
T.separator(4, 11),
T.denest(5, 1),
];
// Materialize the generator so assertEquals can compare full arrays.
const reality = [...tokenize(input)];
assertEquals(expectation, reality);
});

1 comment on commit b2bd98d

@deno-deploy
Copy link

@deno-deploy deno-deploy bot commented on b2bd98d Dec 8, 2021

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Failed to deploy:

failed to fetch 'https://raw.githubusercontent.com/EthanThatOneKid/fart/b2bd98d982bfa082408b9d3b75ae8358d18f6e7a/std/server/worker.ts': HTTP status client error (404 Not Found) for url (https://raw.githubusercontent.com/EthanThatOneKid/fart/b2bd98d982bfa082408b9d3b75ae8358d18f6e7a/std/server/worker.ts)

Please sign in to comment.