X-Git-Url: https://git.kianting.info/?a=blobdiff_plain;f=src%2Findex.ts;h=157b1607348020eac4b7e8648093eb77ff8059c6;hb=8d03cc503c747bb974c75d39f8b9c0678a9cc91f;hp=4c1ce91924e0cfe689858d9ee19fc43e8890e0e2;hpb=f801ef14fc25ae122aaef4aede639ddb9b37006a;p=clo diff --git a/src/index.ts b/src/index.ts index 4c1ce91..157b160 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,224 +1,145 @@ var fs = require('fs'); -import { argv, resourceUsage } from 'node:process'; -import * as tk from './tokenize.js'; +import jsTokens from "js-tokens"; import * as util from 'util'; -import { drawEllipsePath, reduceRotation } from 'pdf-lib'; -import { isTypedArray } from 'node:util/types'; -import { error } from 'node:console'; - +import * as p from 'typescript-parsec'; +import { Token } from 'typescript-parsec'; /** - * debug reprensenting + * + * # REPRESENTATION */ -let repr = (x : any)=>{return util.inspect(x, {depth: null})}; - /** - * token tree type. + * convert a `tkTree` AST to S-expr string + * @param t the `tkTree` + * @returns S-expr String */ -type tkTree = tkTree[] | tk.Token +export function tkTreeToSExp(t: tkTree): string{ + var str = ""; -/** - * concated 2 `tkTree`s - * @param x the array to be concated - * @param y the item or array to ve concated - * @returns concated tkTree array, or thrown error if can't be concated. - */ -function concat(x: tkTree, y:tkTree): tkTree[] { - if (Array.isArray(x)){ - return x.concat(y); + if (Array.isArray(t)){ + let strArray = t.map((x)=>tkTreeToSExp(x)); + str = "(" + strArray.join(" ") + ")"; }else{ - throw new Error("the tkTree can't be concated, because it's not an array."); - + if (t=== undefined){ + str = "%undefined" + }else{ + str = t; + } } -} -function slice(x: tkTree, index?:number, end?:number): tkTree[] { - if (Array.isArray(x)){ - return x.slice(index,end); - }else{ - throw new Error("the tkTree can't be concated, because it's not an array."); - - } + return str; } +/**inspect the inner of the representation. */ +let repr = (x : any)=>{return util.inspect(x, {depth: null})}; /** - * TokenMatcheePair for tokens' parser combinator - * matched: the matched (now and before) tokens - * remained: tokens to be matched - * ast: abstract syntax tree + * + * # TYPES */ -export interface TokenMatcheePair { - matched: tk.Token[] - remained: tk.Token[] - ast : tkTree[] + + +type tkTree = string | tkTree[]; + +enum TokenKind { + Seperator, + Semicolon, + Number, + Op, + ExprMark, + Paren, + SpaceNL, + Id, + Str, } /** - * @description - * match one token type. - * - * it returns a function which test if the type of first token of the `remained` part of - * the argument of the function is `typ` , if it's true, update the `TokenMatcheePair` wrapped - * in `Some`. Otherwise, it returns `None`. - * * @param typ : the type to be test. - * @returns the updated `TokenMatcheePair` wrapped in `Some(x)` or `None`. 
+ * Parsing */ -export function m1TType(typ: tk.TokenType): - (m: TokenMatcheePair) => tk.Maybe { - return (m: TokenMatcheePair) => { - if (m.remained.length == 0) { - return { _tag: "None" }; - } - /** - * token to be matched - * */ - const ttbm = m.remained[0]; - - if (ttbm.type == typ) { - let new_matched = m.matched.concat(ttbm); - let result : tk.Some = { - _tag: "Some", value: { - matched: new_matched, - remained: m.remained.slice(1), - ast: ([ttbm]), - } - }; - return result; - } - else { - return { _tag: "None" }; - } - } -}; +const lexer = p.buildLexer([ + [true, /^\d+(\.\d+)?/g, TokenKind.Number], + [true, /^\;/g, TokenKind.Semicolon], + [true, /^[-][-][-]/g, TokenKind.Seperator], + [true, /^[\+\-\*\/\&\|\!\^\<\>\~\=\?]+/g, TokenKind.Op], + [true, /^\@+/g, TokenKind.ExprMark], + [true, /^[()\[\]{}]/g, TokenKind.Paren], + [true, /^["]([\"]|[\\].)*["]/g, TokenKind.Str], + [true, /^[']([\']|[\\].)*[']/g, TokenKind.Str], + [true, /^[()\[\]{}]/g, TokenKind.Paren], + [true, /^[^\s\n\t\r;]+/g, TokenKind.Id], + [false, /^(\s|\n|\r|\t)+/g, TokenKind.SpaceNL] +]); /** - * type int + * + * # TEST */ -let tInt = m1TType(tk.TokenType.INT); -let tAdd = m1TType(tk.TokenType.I_ADD); -let tMul = m1TType(tk.TokenType.I_MUL); +const inputTxt= +`import ast; +--- +122`; -argv.forEach((val, index) => { - console.log(`${index}=${val}`); -}); +const PROG = p.rule(); +const UNIT = p.rule(); +const IMPORTS = p.rule(); +const SEMICOLON = p.rule(); -/** - * like `m ==> f` in ocaml - * @param m matchee wrapped - * @param f matching function - * @returns wrapped result - */ -function thenDo(m : tk.Maybe, f : Function){ - if (m._tag == "None"){ - return m; - }else{ - var a : tk.Maybe = f(m.value); - if (a._tag == "Some"){ - a.value.ast = concat(m.value.ast, a.value.ast); - } +let doubleMinus = { type: 'Punctuator', value: '--' }; +let doubleMinus2 = p.str('--'); +const TERM = p.rule(); - return a; - } +function applyUnit(value: Token): tkTree{ + return value.text; } -/** - * like `f1 | f2` in regex - * @param f1 the first tried function - * @param f2 the second tried function - * @returns wrapped result - */ -function orDo(f1 : Function, f2 : Function){ - return (x : TokenMatcheePair) =>{ - let res1 : tk.Maybe = f1(x); - if (res1._tag == "Some"){ - return res1; - }else{ - let res2 : tk.Maybe = f2(x); - return res2; - } - } - +function applySemiColon(value: Token): tkTree{ + return value.text; } -let midfix = (f : Function, signal? 
: string) => (x : TokenMatcheePair)=>{ - var a = f(x); - if (a._tag == "Some"){ - let ast_head : tkTree[] = slice(a.value.ast,0,a.value.ast.length-3); - let ast_tail : tkTree[] = slice(a.value.ast,a.value.ast.length-3); - let new_ast = [ast_tail]; - a.value.ast = new_ast; - - console.log("+"+signal+"+"+repr(a)); - - - } - return a; +function applyParts(first: tkTree, + second: [Token, tkTree]):tkTree { + return ["%clo", first , second[1]]; } -/** - * - * fac1 = int MUL int - */ -//let fac1 = midfix((x : TokenMatcheePair)=> -// thenDo(thenDo(thenDo(tk.toSome(x), tInt), tMul), tInt)); -let fac1 = (x : TokenMatcheePair) => { - let a = midfix((x : TokenMatcheePair)=> - thenDo(thenDo(thenDo(tk.toSome(x), tInt), tMul), tInt), "fac1")(x); - return a; -} - -/** - * - * fac2 = int MUL int - */ -let fac2 = tInt; +function applyImports(input: [Token,Token[], tkTree]):tkTree{ + let importTail = input[1].map(x=>x.text); + return ["import"].concat(importTail); +}; /** - * fac = fac1 | fac2 + * PROG : IMPORTS '---' UNIT; */ -let fac = orDo(fac1, fac2); - +PROG.setPattern( + p.lrec_sc(IMPORTS, p.seq(p.str('---'), UNIT), applyParts) + +) /** - * - * expr1 = fac ADD fac + * PROG : 'import' Id* SEMICOLON; */ -let expr1 = midfix((x : TokenMatcheePair)=> - thenDo(thenDo(thenDo(tk.toSome(x), fac), tAdd), fac), "expr1"); +IMPORTS.setPattern( + p.apply(p.seq(p.str('import'), p.rep_sc(p.tok(TokenKind.Id)), SEMICOLON) , applyImports) +); + /** - * expr2 = fac + * SEMICOLON : ';'; */ -let expr2 = fac; +SEMICOLON.setPattern( + p.apply(p.tok(TokenKind.Semicolon), applySemiColon) +); /** - * expr = expr1 | expr2 + * UNIT : Number; */ -let expr = orDo(expr1, expr2); - - - - -let tokens = tk.tokenize("2+3*4");//tk.tokenize(argv[2]); -let tokensFiltered = tokens.filter( - (x)=>{return (x.type != tk.TokenType.NL - && x.type != tk.TokenType.SP)}); - -let wrappedTokens : tk.Maybe = - tk.toSome({ - matched : [] , - remained : tokensFiltered, - ast : []}); +UNIT.setPattern( + p.apply(p.tok(TokenKind.Number), applyUnit) +); -let beta = expr({ - matched : [] , - remained : tokensFiltered, - ast : []}); +let tree = p.expectSingleResult(p.expectEOF(PROG.parse(lexer.parse(inputTxt)))); -console.log(repr(wrappedTokens)); -console.log(repr(beta)); +console.log("RESULT="+tkTreeToSExp(tree));
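
The new src/index.ts replaces the hand-rolled thenDo/orDo combinators with typescript-parsec: a buildLexer token table, rule() parsers wired together with setPattern, apply callbacks that build the tkTree AST (PROG : IMPORTS '---' UNIT, IMPORTS : 'import' Id* ';', UNIT : Number), and expectSingleResult(expectEOF(...)) to run the parse. Below is a minimal, self-contained sketch of that same pipeline on a toy sum grammar, assuming typescript-parsec's documented API; the token kinds, rule name, and helper functions in it are invented for illustration and are not part of this commit.

import * as p from 'typescript-parsec';
import { Token } from 'typescript-parsec';

// Toy grammar: EXPR : Number ('+' Number)* ;
enum Tok { Number, Plus, Space }

const lexer = p.buildLexer([
    [true,  /^\d+/g, Tok.Number],
    [true,  /^\+/g,  Tok.Plus],
    [false, /^\s+/g, Tok.Space],   // discarded, like SpaceNL in the diff
]);

const EXPR = p.rule<Tok, number>();

// apply callback: turn a Number token into its numeric value
function applyNum(tok: Token<Tok>): number {
    return +tok.text;
}

EXPR.setPattern(
    // lrec_sc(seed, rest, fold): parse one Number, then fold every following
    // "+ Number" into the running result, the same combinator PROG uses to
    // attach the "'---' UNIT" tail onto IMPORTS.
    p.lrec_sc(
        p.apply(p.tok(Tok.Number), applyNum),
        p.seq(p.str<Tok>('+'), p.apply(p.tok(Tok.Number), applyNum)),
        (sum, rhs) => sum + rhs[1]
    )
);

const result = p.expectSingleResult(p.expectEOF(EXPR.parse(lexer.parse('1+2+3'))));
console.log(result);   // 6

lrec_sc is how the grammar stays left-associative without literal left recursion: it parses a seed once and then folds each repetition into the accumulated value.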
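
One detail in the buildLexer table is worth calling out: typescript-parsec's lexer prefers the longest match at each position and, as far as I can tell, breaks ties by rule order, so the three-dash Seperator pattern has to sit above the general Op pattern or '---' would tie with (and could lose to) an Op token. A quick way to inspect what the table yields for the embedded test input is to walk the token chain; the snippet below assumes, purely for illustration, that the lexer and TokenKind from the diff are exported, which the commit does not actually do.

import { lexer, TokenKind } from './index';   // hypothetical exports, see note above

let tok = lexer.parse(`import ast;\n---\n122`);
while (tok !== undefined) {
    // rules registered with keep=false (SpaceNL) never appear in the chain
    console.log(TokenKind[tok.kind], JSON.stringify(tok.text));
    tok = tok.next;
}
// expected kinds, in order: Id Id Semicolon Seperator Number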
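
Hand-tracing the embedded inputTxt through those rules: IMPORTS consumes `import ast;` and yields ["import", "ast"], the folded '---' UNIT step supplies "122", and applyParts wraps the two as ["%clo", ["import", "ast"], "122"]. The snippet below renders that traced tree with the exported tkTreeToSExp; it is a worked expectation rather than output captured from running the commit, and the './index' import path is an assumption about where the compiled module lives.

// Hand-traced expectation for the embedded test input, not a captured run.
// Note: importing './index' also executes its own top-level test parse.
import { tkTreeToSExp } from './index';

const traced = ["%clo", ["import", "ast"], "122"];
console.log(tkTreeToSExp(traced));   // expected: (%clo (import ast) 122)

If that trace is right, the final console.log in the new file should print RESULT=(%clo (import ast) 122) for the built-in inputTxt.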