From 7e50e1b30226f50c2f2a1032772c87befcc9ccf4 Mon Sep 17 00:00:00 2001
From: EthanThatOneKid <31261035+EthanThatOneKid@users.noreply.github.com>
Date: Thu, 11 Nov 2021 21:32:42 -0800
Subject: [PATCH] yeye

---
 lib/tokenize/lexicon.ts | 34 +++++++++++++++++++---------------
 lib/tokenize/token.ts   | 21 +++++++++++++++++++++
 2 files changed, 40 insertions(+), 15 deletions(-)

diff --git a/lib/tokenize/lexicon.ts b/lib/tokenize/lexicon.ts
index 95123be..e335ee1 100644
--- a/lib/tokenize/lexicon.ts
+++ b/lib/tokenize/lexicon.ts
@@ -11,22 +11,26 @@ export enum Lexicon {
   Modifier,
   TextWrapper,
   Comment,
+  CommentOpener,
+  CommentCloser,
   Unknown,
   EOF,
 }
 
-export const LEXICON = {
-  [Lexicon.Identifier]: null,
-  [Lexicon.StructOpener]: "{",
-  [Lexicon.StructCloser]: "}",
-  [Lexicon.TupleOpener]: "(",
-  [Lexicon.TupleCloser]: ")",
-  [Lexicon.TypeDefiner]: ["type", "struct", "interface"],
-  [Lexicon.PropertyDefiner]: ":",
-  [Lexicon.PropertyOptionalMarker]: "?",
-  [Lexicon.Modifier]: ["%", "mod"],
-  [Lexicon.TextWrapper]: ['"', "'", "`"],
-  [Lexicon.Comment]: [";", "//"],
-  [Lexicon.Unknown]: null,
-  [Lexicon.EOF]: null,
-} as const;
+export const LEXICON = new Map([
+  [Lexicon.Identifier, null],
+  [Lexicon.StructOpener, "{"],
+  [Lexicon.StructCloser, "}"],
+  [Lexicon.TupleOpener, "("],
+  [Lexicon.TupleCloser, ")"],
+  [Lexicon.TypeDefiner, ["type", "struct", "interface"]],
+  [Lexicon.PropertyDefiner, ":"],
+  [Lexicon.PropertyOptionalMarker, "?"],
+  [Lexicon.Modifier, ["%", "mod"]],
+  [Lexicon.TextWrapper, ['"', "'", "`"]],
+  [Lexicon.Comment, [";", "//"]],
+  [Lexicon.CommentOpener, "/*"],
+  [Lexicon.CommentCloser, "*/"],
+  [Lexicon.Unknown, null],
+  [Lexicon.EOF, null],
+]);
diff --git a/lib/tokenize/token.ts b/lib/tokenize/token.ts
index e69de29..62fc591 100644
--- a/lib/tokenize/token.ts
+++ b/lib/tokenize/token.ts
@@ -0,0 +1,21 @@
+import { LEXICON, Lexicon } from "./lexicon.ts";
+
+export class Token {
+  public kind: Lexicon | null = null;
+  public line: number = -1;
+  public column: number = -1;
+
+  constructor(
+    private raw: string,
+    line: number,
+    column: number,
+    noCheck = false,
+  ) {
+    this.line = line;
+    this.column = column;
+    this.kind = noCheck ? Lexicon.Identifier : Token.getKindOf(raw);
+  }
+
+  // https://github.com/EthanThatOneKid/fart/blob/c43f2333458b2cbc40d167610d87e2a2e3f89885/lib/tokenize/token.ts?_pjax=%23js-repo-pjax-container%2C%20div%5Bitemtype%3D%22http%3A%2F%2Fschema.org%2FSoftwareSourceCode%22%5D%20main%2C%20%5Bdata-pjax-container%5D#L48
+  static getKindOf(raw: string): Lexicon | null { return null; } // stub, see link above
+}
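
Note on the Token.getKindOf stub: this patch leaves it returning null and points at the linked commit for the real lookup. A minimal sketch of how the stub could resolve a raw string against the new LEXICON map is below; the Identifier fallback is an assumption for illustration, not something this patch specifies.

import { LEXICON, Lexicon } from "./lexicon.ts";

// Sketch: scan the LEXICON map and return the first kind whose registered
// value (a single string or a list of alternatives) matches the raw text.
export function getKindOf(raw: string): Lexicon | null {
  for (const [kind, value] of LEXICON) {
    if (value === null) continue;
    const matches = Array.isArray(value) ? value.includes(raw) : value === raw;
    if (matches) return kind;
  }
  // Fallback is a guess: unmatched text is treated as a plain identifier.
  return Lexicon.Identifier;
}

With a lookup like this, new Token("/*", 3, 1) would resolve to Lexicon.CommentOpener via the entries added in lexicon.ts, while new Token("foo", 3, 10) would fall back to Lexicon.Identifier.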