var fs = require('fs');
-import { argv, resourceUsage } from 'node:process';
-import * as tk from './tokenize.js';
+import jsTokens from "js-tokens";
import * as util from 'util';
-import { drawEllipsePath, reduceRotation } from 'pdf-lib';
-import { isAnyArrayBuffer, isTypedArray } from 'node:util/types';
-import { error } from 'node:console';
-import { isUndefined } from 'node:util';
-
-/**
- * debug reprensenting
- */
-let repr = (x : any)=>{return util.inspect(x, {depth: null})};
-
+import * as p from 'typescript-parsec';
+import { Token } from 'typescript-parsec';
+import { TokenType } from "./tokenize";
/**
- * token tree type.
- */
-type tkTree = tkTree[] | tk.Token
-
-/**
- * concated 2 `tkTree`s
- * @param x the array to be concated
- * @param y the item or array to ve concated
- * @returns concated tkTree array, or thrown error if can't be concated.
- */
-function concat(x: tkTree, y:tkTree): tkTree[] {
- if (Array.isArray(x)){
- return x.concat(y);
- }else{
- throw new Error("the tkTree can't be concated, because it's not an array.");
-
- }
-}
-
-function slice(x: tkTree, index?:number, end?:number): tkTree[] {
- if (Array.isArray(x)){
- return x.slice(index,end);
- }else{
- throw new Error("the tkTree can't be concated, because it's not an array.");
-
- }
-}
-
-/**
- * TokenMatcheePair for tokens' parser combinator
- *
- * matched: the matched (now and before) tokens
- *
- * remained: tokens to be matched
*
- * ast: abstract syntax tree
+ * # REPRESENTATION
*/
-export interface TokenMatcheePair {
- matched: tk.Token[]
- remained: tk.Token[]
- ast : tkTree[]
-}
-
/**
* convert a `tkTree` AST to S-expr string
* @param t the `tkTree`
if (t=== undefined){
str = "%undefined"
}else{
- str = t.text;
+ str = t;
}
}
return str;
}
+/** Inspect the inner structure of a value (debug helper). */
+let repr = (x : any)=>{return util.inspect(x, {depth: null})};
/**
- * @description
- * match one token type.
*
- * it returns a function which test if the type of first token of the `remained` part of
- * the argument of the function is `typ` , if it's true, update the `TokenMatcheePair` wrapped
- * in `Some`. Otherwise, it returns `None`.
- * * @param typ : the type to be test.
- * @returns the updated `TokenMatcheePair` wrapped in `Some(x)` or `None`.
+ * # TYPES
*/
-export function m1TType(typ: tk.TokenType):
- (m: TokenMatcheePair) => tk.Maybe<TokenMatcheePair> {
- return (m: TokenMatcheePair) => {
- if (m.remained.length == 0) {
- return { _tag: "None" };
- }
- /**
- * token to be matched
- * */
- const ttbm = m.remained[0];
-
- if (ttbm.type == typ) {
- let new_matched = m.matched.concat(ttbm);
- let result : tk.Some<TokenMatcheePair> = {
- _tag: "Some", value: {
- matched: new_matched,
- remained: m.remained.slice(1),
- ast: ([ttbm]),
- }
- };
- return result;
- }
- else {
- return { _tag: "None" };
- }
- }
-};
-
-/**
- * type int
- */
-let tInt = m1TType(tk.TokenType.INT);
-let tId = m1TType(tk.TokenType.ID);
-
-
-let tAdd = m1TType(tk.TokenType.I_ADD);
-let tSub = m1TType(tk.TokenType.I_SUB);
-let tMul = m1TType(tk.TokenType.I_MUL);
-let tDiv = m1TType(tk.TokenType.I_DIV);
-let tLParen = m1TType(tk.TokenType.L_PAREN);
-let tRParen = m1TType(tk.TokenType.R_PAREN);
-let toSome = tk.toSome;
-
-argv.forEach((val, index) => {
- console.log(`${index}=${val}`);
-});
-
-
-/**
- * like `m ==> f` in ocaml
- * @param m matchee wrapped
- * @param f matching function
- * @returns wrapped result
- */
-function thenDo(m : tk.Maybe<TokenMatcheePair>, f : Function){
- if (m._tag == "None"){
- return m;
- }else{
- var a : tk.Maybe<TokenMatcheePair> = f(m.value);
- if (a._tag == "Some"){
- a.value.ast = concat(m.value.ast, a.value.ast);
- }
-
- return a;
- }
+type tkTree = string | tkTree[];
+
+enum TokenKind {
+ Seperator, // ---
+ Semicolon, // ;
+ Number,
+ Op,
+ ExprMark, // @
+ ExcapeAt, // \@
+ Paren,
+ SpaceNL, // \s\t\n\r
+ Id,
+ Str,
+ Comment, // /* ooo */
}
/**
- * like `f1 | f2` in regex
- * @param f1 the first tried function
- * @param f2 the second tried function
- * @returns wrapped result
+ * Parsing
*/
-function orDo(f1 : Function, f2 : Function){
- return (x : TokenMatcheePair) =>{
- let res1 : tk.Maybe<TokenMatcheePair> = f1(x);
- if (res1._tag == "Some"){
- return res1;
- }else{
- let res2 : tk.Maybe<TokenMatcheePair> = f2(x);
- return res2;
- }
- }
-}
-
+const lexer = p.buildLexer([
+ [true, /^\d+(\.\d+)?/g, TokenKind.Number],
+ [true, /^\\\@/g, TokenKind.ExcapeAt],
+ [true, /^\/\*([^/]|\/[^*])*\*\//g, TokenKind.Comment],
+ [true, /^\;/g, TokenKind.Semicolon],
+ [true, /^[-][-][-]/g, TokenKind.Seperator],
+ [true, /^[\+\-\*\/\&\|\!\^\<\>\~\=\?]+/g, TokenKind.Op],
+ [true, /^\@/g, TokenKind.ExprMark],
+ [true, /^[()\[\]{}]/g, TokenKind.Paren],
+ [true, /^["]([\"]|[\\].)*["]/g, TokenKind.Str],
+ [true, /^[']([\']|[\\].)*[']/g, TokenKind.Str],
+ [true, /^[()\[\]{}]/g, TokenKind.Paren],
+ [true, /^[^\/\\\@\s\n\t\r;]+/g, TokenKind.Id],
+ [true, /^(\s|\n|\r|\t)+/g, TokenKind.SpaceNL],
+
+]);
/**
*
- * @param m : the `MatcheePair` to be consumed.
- * @returns if the length of `m.remained` >= 1; consumes the matchee by 1 token
- * and wraps it in `Some`,
- * otherwise, returns `None`.
+ * # TEST
*/
-export function matchAny(m: TokenMatcheePair): tk.Maybe<TokenMatcheePair> {
- if (m.remained.length >= 1) {
- return {
- _tag: "Some", value: {
- matched: m.matched.concat(m.remained[0]),
- remained: m.remained.slice(1),
- ast : [m.remained[0]],
- }
- };
- } else {
- return { _tag: "None" };
- }
-}
+const inputTxt=
+`import a as b; /*bacourt*/
+/* ba choir
+ipsum lorem*/
-/**
-* @description repeating matching function `f`
-* zero or more times, like the asterisk `*` in regex `f*` .
-* @param f : the function to be repeated 0+ times.
-* @returns:the combined function
-*/
-export function OnceOrMoreDo(f: Function): (x: TokenMatcheePair) =>
- tk.Maybe<TokenMatcheePair> {
- return (x) => {
- var wrappedOldX: tk.Maybe<TokenMatcheePair> = { _tag: "Some", value: x };
- var wrappedNewX: tk.Maybe<TokenMatcheePair> = wrappedOldX;
+import you as john;
+---
- var counter = -1;
+臺中市\\\@
- while (wrappedNewX._tag != "None") {
- wrappedOldX = wrappedNewX;
- wrappedNewX = thenDo(wrappedOldX, f);
- counter += 1;
+公園
+@1+2==3;
- };
+console.log("122");@
+山頂
+`;
- if (counter <= 0){
- return { _tag: "None"};
- }
- let ast = wrappedOldX.value.ast ;
- wrappedOldX.value.ast =ast.slice(ast.length-counter);
- console.log(repr(wrappedOldX.value.ast));
- return wrappedOldX; };
-}
+const PROG = p.rule<TokenKind, tkTree>();
+const SEGMENT = p.rule<TokenKind, tkTree>();
+const IMPORT = p.rule<TokenKind, tkTree>();
+const IMPORTS = p.rule<TokenKind, tkTree>();
+const SEMICOLON = p.rule<TokenKind, tkTree>();
+const EXCAPE_AT = p.rule<TokenKind, tkTree>();
+const NOT_AT_TEXT = p.rule<TokenKind, tkTree>();
+const CONTENT = p.rule<TokenKind, tkTree>();
-/**
- * aux function for midfix operator
- * @param f function
- * @param signal the rule name
- * @returns
- */
-let midfix = (f : Function, signal? : string) => (x : TokenMatcheePair)=>{
- var a = f(x);
- if (a._tag == "Some"){
- let ast_tail : tkTree[] = slice(a.value.ast,a.value.ast.length-3);
- let new_ast = [ast_tail];
- a.value.ast = new_ast;
+let doubleMinus = { type: 'Punctuator', value: '--' };
+let doubleMinus2 = p.str('--');
+const TERM = p.rule<TokenKind, tkTree>();
- console.log("+"+signal+"+"+repr(a));
+function applySegment(input: [Token<TokenKind>, Token<TokenKind>[],
+ Token<TokenKind>]): tkTree[]{
+ let unpackedInnerExprs = input[1].map((x)=>{return x.text});
+ return ["%exprs", unpackedInnerExprs];
+}
-
- }
- return a;
+function applySemiColon(value: Token<TokenKind.Semicolon>): tkTree{
+ return value.text;
}
-let circumfix = (f : Function, signal? : string) => (x : TokenMatcheePair)=>{
- var a = f(x);
- if (a._tag == "Some"){
- let inner = a.value.ast[a.value.ast.length-2];
- let ast_middle : tkTree[] = [inner];
- let new_ast = [ast_middle];
- a.value.ast = new_ast;
- }
- return a;
+function applyParts(first: tkTree,
+ second: [Token<TokenKind>, tkTree]):tkTree {
+ return ["%clo", first , second[1]];
}
-/**
- * TODO: 12(13)(14) only parsed with only 12(13)
- */
-/** single1 = tInt | "(" expr ")"*/
-let single1 = circumfix((x : TokenMatcheePair) =>
- thenDo(thenDo(thenDo(tk.toSome(x), tLParen), expr), tRParen), "fac1");
-let single2= tInt;
-let single = orDo(single1, single2);
-
-/** func = single | single "(" single ")"
- * i.e.
- *
- * func = single | func_aux ( int )
- *
-*/
+function applyComment(value: Token<TokenKind.Comment>): tkTree[]{
+ return [value.text];
+}
-/** fac = single ["(" single ")"]? | single */
-let fac1Appliee = circumfix((x : TokenMatcheePair) => thenDo(thenDo(thenDo(tk.toSome(x), tLParen), tInt), tRParen), "fac1");
-let fac1 = (x : TokenMatcheePair) =>
- {
- let raw = thenDo(thenDo(toSome(x), single), OnceOrMoreDo(fac1Appliee));
- console.log("+"+"火鳥"+"+"+repr(raw));
-
-
- if (raw._tag == "Some"){
+function applyImport(input: [Token<TokenKind>,Token<TokenKind>[], tkTree]) : tkTree{
+ let importTail = input[1].map(x=>x.text);
+ return ["import"].concat(importTail);
+};
- var result : tkTree = raw.value.ast[0];
- let applyToken : tk.Token = {text: '%apply', ln:0, col:0};
- for (var i=1; i<raw.value.ast.length; i++){
- result = [applyToken, result, raw.value.ast[i]];
- }
+/*
+function applyImportComment(input: [Token<TokenKind>,Token<TokenKind>[],
+ tkTree, Token<TokenKind.Comment>]) : tkTree{
+ let importTail = input[1].map(x=>x.text);
+ let comment = [input[3].text];
+ return ["import"].concat(importTail).concat(comment);
+};*/
- console.log("+"+"hitori"+"+"+repr(result));
+function applyImports(input : [tkTree, tkTree[]]): tkTree{
+ let resultBody = [input[0]].concat(input[1]);
+ let resultWrapper = ["%import", resultBody];
+ return resultWrapper;
+};
- if (!Array.isArray(result)){
- raw.value.ast = [result];
- }else{
- raw.value.ast = result;
- }
- }
-
-
- return raw;
- };
-let fac2 = single;
-let fac = orDo(fac1, fac2);
+function applyNotAtText(value : Token<TokenKind>): tkTree{
+ if (value.text == "\\\@"){
+ return '@';
+ }
+ else{return value.text;}
+};
+function applyText (input : tkTree): tkTree[]{
+ return ["%text", input];
+};
-/**
- *
- * term1 = fac (MUL | DIV) fac
- */
+function applyContent(input : tkTree[]): tkTree[]{
+ return ["%content", input];
+};
-let term1 = midfix((x : TokenMatcheePair)=>
- thenDo(thenDo(thenDo(tk.toSome(x), fac), orDo(tMul,tDiv)), fac), "term1");
+function applySpaceNL(value : Token<TokenKind.SpaceNL>): tkTree{
+ return value.text;
+}
-
/**
- *
- * term2 = int MUL int
+ * IMPORTEE: Number, Op, Paren, Id, Str, SpaceNL, Comment
*/
-let term2 = fac;
+let IMPORTEE = p.alt(p.tok(TokenKind.Number),
+ p.tok(TokenKind.Op),
+ p.tok(TokenKind.Paren),
+ p.tok(TokenKind.Id),
+ p.tok(TokenKind.Str),
+ p.tok(TokenKind.SpaceNL),
+ p.tok(TokenKind.Comment));
+
+let NOT_AT = p.alt(p.tok(TokenKind.Seperator),
+ p.tok(TokenKind.Semicolon),
+ p.tok(TokenKind.Number),
+ p.tok(TokenKind.ExcapeAt),
+ p.tok(TokenKind.Op),
+ p.tok(TokenKind.Paren),
+ p.tok(TokenKind.SpaceNL),
+ p.tok(TokenKind.Id),
+ p.tok(TokenKind.Str),
+ p.tok(TokenKind.Comment),
+ );
/**
- * term = term1 | term2
+ * PROG : IMPORTS '---' CONTENT;
*/
-let term = orDo(term1, term2);
-
+PROG.setPattern(
+ p.lrec_sc(IMPORTS, p.seq(p.str('---'), CONTENT), applyParts)
+
+)
/**
- *
- * expr1 = term ADD term
+ * NOT_AT_TEXT : NOT_AT
*/
-let expr1 = midfix((x : TokenMatcheePair)=>
- thenDo(thenDo(thenDo(tk.toSome(x), term), orDo(tAdd,tSub)), term), "expr1");
+NOT_AT_TEXT.setPattern(
+ p.apply(NOT_AT, applyNotAtText)
+);
+
+IMPORTS.setPattern(
+ p.apply( p.seq(IMPORT, p.rep(IMPORT)), applyImports)
+);
+
/**
- * expr2 = term
+ * IMPORT :
+ * 'import' IMPORTEE* SEMICOLON |
+ * COMMENT | SPACENL
*/
-let expr2 = term;
+IMPORT.setPattern(
+ p.alt(
+ p.apply(p.seq(p.str('import'), p.rep_sc(IMPORTEE), SEMICOLON),
+ applyImport),
+ p.apply(p.tok(TokenKind.Comment), applyComment),
+ p.apply(p.tok(TokenKind.SpaceNL), applySpaceNL)
+
+ )
+);
/**
- * expr = expr1 | expr2
+ * SEMICOLON : ';';
*/
-let expr = orDo(expr1, expr2);
-
+SEMICOLON.setPattern(
+ p.apply(p.tok(TokenKind.Semicolon), applySemiColon)
+);
-let tokens = tk.tokenize("1");
-//let tokens = tk.tokenize("(4-(3/4))");
-//tk.tokenize(argv[2]);
+/**
+ * SEGMENT : NOT_AT_TEXT* |
+ *           '@' NOT_AT* '@'
+ */
+SEGMENT.setPattern(
+ p.alt(
+ p.apply(p.rep_sc(NOT_AT_TEXT), applyText),
+ p.apply(p.seq(p.str('@'), p.rep(NOT_AT), p.str('@')), applySegment),
+ )
+);
-let tokensFiltered = tokens.filter(
- (x)=>{return (x.type != tk.TokenType.NL
- && x.type != tk.TokenType.SP)});
+/**
+ * CONTENT : SEGMENT*
+ */
+CONTENT.setPattern(
+ p.apply(p.rep(SEGMENT), applyContent)
+);
-let wrappedTokens : tk.Maybe<TokenMatcheePair> =
- tk.toSome({
- matched : [] ,
- remained : tokensFiltered,
- ast : []});
-let beta = expr({
- matched : [] ,
- remained : tokensFiltered,
- ast : []});
+let tree = p.expectSingleResult(p.expectEOF(PROG.parse(lexer.parse(inputTxt))));
-if (beta._tag == "Some"){
- console.log(tkTreeToSExp(beta.value.ast));
-}
-console.log("RESULT="+repr(beta));
+console.log("RESULT="+tkTreeToSExp(tree));