Skip to content

Commit

Permalink
Add Char type for ensuring priority is one char
Browse files Browse the repository at this point in the history
  • Loading branch information
GuiltyDolphin committed Jul 8, 2021
1 parent 1eb8e13 commit fd51047
Show file tree
Hide file tree
Showing 7 changed files with 40 additions and 14 deletions.
19 changes: 19 additions & 0 deletions packages/orga/src/char.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
/**
 * A string statically known to contain exactly one UTF-16 code unit.
 *
 * NOTE: `length === 1` counts UTF-16 code units, not code points, so
 * astral characters (e.g. emoji such as 😀, whose `.length` is 2) do
 * not qualify as `Char`.
 */
export type Char = string & { length: 1 };

/** Type guard: true iff `c` is a string of exactly one code unit. */
export const isChar = (c: string): c is Char => c.length === 1;

/**
 * Assert that `c` is a string of exactly one code unit.
 *
 * @throws Error if `c.length !== 1`.
 */
export function assertChar(c: string): asserts c is Char {
  if (!isChar(c)) {
    throw new Error('expected string of length 1');
  }
}

/**
 * Narrow `c` to `Char`, throwing if it is not exactly one code unit long.
 */
export const char = (c: string): Char => {
  assertChar(c);
  return c;
};

/**
 * Like `String.prototype.charAt`, but returns `null` (instead of the
 * empty string) when `n` is out of range.
 */
export const charAt = (c: string, n: number): Char | null => {
  const ch = c.charAt(n);
  return ch ? char(ch) : null;
};
19 changes: 10 additions & 9 deletions packages/orga/src/tokenize/__tests__/headline.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import {
tokPriority,
} from './util';

import { char, Char } from '../../char';
import { Token } from '../../types';

describe("tokenize headline", () => {
Expand All @@ -25,8 +26,8 @@ describe("tokenize headline", () => {
testHeadline("** a headline", 2, [tokText("a headline")]),
testHeadline("***** a headline", 5, [tokText("a headline")]),
testHeadline("* a 😀line", 1, [tokText("a 😀line")]),
testHeadline("* TODO [#A] a headline :tag1:tag2:", 1, [tokTodo("TODO", true), tokPriority("A"), tokText("a headline"), tokTags(["tag1", "tag2"])]),
testHeadline("* TODO [#A] a headline :tag1:123:#hash:@at:org-mode:under_score:98%:", 1, [tokTodo("TODO", true), tokPriority("A"), tokText("a headline"), tokTags(["tag1", "123", "#hash", "@at", "org-mode", "under_score", "98%"])]),
testHeadline("* TODO [#A] a headline :tag1:tag2:", 1, [tokTodo("TODO", true), tokPriority(char("A")), tokText("a headline"), tokTags(["tag1", "tag2"])]),
testHeadline("* TODO [#A] a headline :tag1:123:#hash:@at:org-mode:under_score:98%:", 1, [tokTodo("TODO", true), tokPriority(char("A")), tokText("a headline"), tokTags(["tag1", "123", "#hash", "@at", "org-mode", "under_score", "98%"])]),
]);

testLexer("DONE todo keyword", ...testHeadline("* DONE heading", 1, [tokTodo("DONE", false), tokText("heading")]));
Expand Down Expand Up @@ -55,8 +56,8 @@ describe("tokenize headline", () => {
testLexer("with space is keyword", ...testHeadline("* TODO ", 1, [tokTodo("TODO", true)]));
});
describe("priority cookie", () => {
testLexer("without space is cookie", ...testHeadline("* [#A]", 1, [tokPriority("A")]));
testLexer("with space is cookie", ...testHeadline("* [#A] ", 1, [tokPriority("A")]));
testLexer("without space is cookie", ...testHeadline("* [#A]", 1, [tokPriority(char("A"))]));
testLexer("with space is cookie", ...testHeadline("* [#A] ", 1, [tokPriority(char("A"))]));
});
describe("tags", () => {
// ambiguous in v2021.07.03 spec, but Org parser does it like this (2021-07-06)
Expand All @@ -72,21 +73,21 @@ describe("tokenize headline", () => {
testHeadline("** DONE", 2, [tokTodo("DONE", false)]),
testHeadline("*** Some e-mail", 3, [tokText("Some e-mail")]),
// TODO: 'COMMENT' should be treated specially here according to the spec
testHeadline("* TODO [#A] COMMENT Title :tag:a2%:", 1, [tokTodo("TODO", true), tokPriority("A"), tokText("COMMENT Title"), tokTags(["tag", "a2%"])]),
testHeadline("* TODO [#A] COMMENT Title :tag:a2%:", 1, [tokTodo("TODO", true), tokPriority(char("A")), tokText("COMMENT Title"), tokTags(["tag", "a2%"])]),
]);

describe("priority cookies", () => {
testLexer('empty priority cookie is text', ...testHeadline("* [#]", 1, [tokText("[#]")]));
testLexer('uppercase letter is ok', ...testHeadline("* [#A]", 1, [tokPriority("A")]));
testLexer('lowercase letter is ok', ...testHeadline("* [#a]", 1, [tokPriority("a")]));
testLexer('uppercase letter is ok', ...testHeadline("* [#A]", 1, [tokPriority(char("A"))]));
testLexer('lowercase letter is ok', ...testHeadline("* [#a]", 1, [tokPriority(char("a"))]));
// v2021.07.03 of the spec says that the priority is "a single
// letter" - it is ambiguous as to whether this means 'character',
// or includes digits etc., but the Org parser currently accepts
// any single (ASCII) character tried (including ']') except
// newline (2021-07-06)
testLexerMulti('nonletters okay', [
testLexerMulti('nonletters okay', ([
'1', '-', '_', '?', '#', ' ', '\t', '', '\\', ']',
].map(c => testHeadline(`* [#${c}]`, 1, [tokPriority(c)])));
] as Char[]).map(c => testHeadline(`* [#${c}]`, 1, [tokPriority(c)])));

testLexer('newline not okay', '* [#\n]', [tokStars(1), tokText('[#'), tokNewline(), tokText(']')]);

Expand Down
3 changes: 2 additions & 1 deletion packages/orga/src/tokenize/__tests__/util.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ import {
Token,
} from '../types';

import { Char } from '../../char';
import tok from './tok';
import { ParseOptions } from '../../options'
import * as tk from '../util';
Expand Down Expand Up @@ -109,7 +110,7 @@ export const tokTodo = (keyword: string, actionable: boolean, extra: Extra<Todo,
tk.tokTodo(keyword, actionable, { _text: keyword, ...extra });

/** Priority cookie token. */
export const tokPriority = (value: string, extra: Extra<Priority, 'value'> = {}): Priority =>
export const tokPriority = (value: Char, extra: Extra<Priority, 'value'> = {}): Priority =>
tk.tokPriority(value, { _text: `[#${value}]`, ...extra });

export const tokHorizontalRule = (extra: Extra<HorizontalRule> = {}): HorizontalRule =>
Expand Down
3 changes: 2 additions & 1 deletion packages/orga/src/tokenize/headline.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import {
tokTags,
tokTodo
} from './util';
import { charAt } from '../char';

interface Props {
reader: Reader;
Expand Down Expand Up @@ -48,7 +49,7 @@ export default ({ reader, todoKeywordSets }: Props): Token[] => {
const priority = eat(/^\[#(.)\]/)
if (!isEmpty(priority.position)) {
const { value, ...rest } = priority;
buffer.push(tokPriority(value.charAt(2), rest));
buffer.push(tokPriority(charAt(value, 2), rest));
}

eat('whitespaces')
Expand Down
2 changes: 2 additions & 0 deletions packages/orga/src/tokenize/types.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { Node, Literal as UnistLiteral } from 'unist';
import { Char } from '../char';

export interface TokenI extends Node {
_text?: string | undefined;
Expand Down Expand Up @@ -34,6 +35,7 @@ export interface HorizontalRule extends TokenI {

/**
 * Priority cookie token (e.g. from `[#A]` in a headline).
 * `value` holds the single priority character; the surrounding `[#` and
 * `]` are not included. (`_text`, per the tokPriority helpers in this
 * commit, carries the full `[#X]` source text.)
 */
export interface Priority extends TokenLiteral {
type: 'priority';
value: Char;
}

export interface Tags extends TokenI {
Expand Down
5 changes: 3 additions & 2 deletions packages/orga/src/tokenize/util.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ import {
Todo,
Token,
} from './types';
import { Char } from '../char';

type Extra<Tok extends Token, Keys extends keyof Tok = 'type'> = Partial<Omit<Tok, Keys | 'type'>>;

Expand Down Expand Up @@ -134,9 +135,9 @@ export const tokTodo = (keyword: string, actionable: boolean, extra: Extra<Todo,
});

/** Priority cookie token. */
export const tokPriority = (value: string, extra: Extra<Priority, 'value'> = {}): Priority => ({
export const tokPriority = (value: Char, extra: Extra<Priority, 'value'> = {}): Priority => ({
type: 'priority',
value: `[#${value}]`,
value,
...extra,
});

Expand Down
3 changes: 2 additions & 1 deletion packages/orga/src/types.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { Literal as UnistLiteral, Node, Parent as UnistParent } from 'unist'
import { Char } from './char';

export { Node } from 'unist';

Expand Down Expand Up @@ -142,7 +143,7 @@ export interface Headline extends Parent, Child<Document | Headline> {
level: number;
keyword?: string;
actionable: boolean;
priority?: string;
priority?: Char;
content: string;
tags?: string[];
// v2021.07.03 - "A headline contains directly one section
Expand Down

0 comments on commit fd51047

Please sign in to comment.