X-Git-Url: https://git.kianting.info/?a=blobdiff_plain;f=src%2Findex.ts;h=157b1607348020eac4b7e8648093eb77ff8059c6;hb=8d03cc503c747bb974c75d39f8b9c0678a9cc91f;hp=a8d103b5d550e29e63fe0e707a9b36b3b10bd090;hpb=6f2e788329da7702ea96dc28ae04499917ec8152;p=clo

diff --git a/src/index.ts b/src/index.ts
index a8d103b..157b160 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,120 +1,145 @@
 var fs = require('fs');
-import { argv } from 'node:process';
-import * as tk from './tokenize.js';
+import jsTokens from "js-tokens";
 import * as util from 'util';
-
+import * as p from 'typescript-parsec';
+import { Token } from 'typescript-parsec';
+/**
+ *
+ * # REPRESENTATION
+ */
 /**
- * token tree type.
+ * convert a `tkTree` AST to S-expr string
+ * @param t the `tkTree`
+ * @returns S-expr String
  */
-type tkTree = tk.Token[] | tk.Token
+export function tkTreeToSExp(t: tkTree): string{
+    var str = "";

-export interface TokenMatcheePair {
-    matched: tkTree[]
-    remained: tk.Token[]
+    if (Array.isArray(t)){
+        let strArray = t.map((x)=>tkTreeToSExp(x));
+        str = "(" + strArray.join(" ") + ")";
+    }else{
+        if (t=== undefined){
+            str = "%undefined"
+        }else{
+            str = t;
+        }
+    }
+
+    return str;
 }
+/**inspect the inner of the representation. */
+let repr = (x : any)=>{return util.inspect(x, {depth: null})};
 /**
- * @description
- * match one token type.
  *
- * it returns a function which test if the type of first token of the `remained` part of
- * the argument of the function is `typ` , if it's true, update the `TokenMatcheePair` wrapped
- * in `Some`. Otherwise, it returns `None`.
- * * @param typ : the type to be test.
- * @returns the updated `TokenMatcheePair` wrapped in `Some(x)` or `None`.
+ * # TYPES
  */
-export function m1TType(typ: tk.TokenType):
-    (m: TokenMatcheePair) => tk.Maybe {
-    return (m: TokenMatcheePair) => {
-        if (m.remained.length == 0) {
-            return { _tag: "None" };
-        }
-        /**
-         * token to be matched
-         * */
-        const ttbm = m.remained[0];
-
-        if (ttbm.type == typ) {
-            m.matched.push(ttbm);
-            return {
-                _tag: "Some", value: {
-                    matched: m.matched,
-                    remained: m.remained.slice(1)
-                }
-            };
-        }
-        else {
-            return { _tag: "None" };
-        }
-    }
-};
-let toSome = tk.toSome;
-let thenDo = tk.thenDo;
-let orDo = tk.orDo;
+type tkTree = string | tkTree[];

-argv.forEach((val, index) => {
-    console.log(`${index}=${val}`);
-});
+enum TokenKind {
+    Seperator,
+    Semicolon,
+    Number,
+    Op,
+    ExprMark,
+    Paren,
+    SpaceNL,
+    Id,
+    Str,
+}

-let commandInput = argv[2];
-let commandInputTokenized = tk.tokenize(commandInput);
-console.log(commandInputTokenized);
+/**
+ * Parsing
+ */
+const lexer = p.buildLexer([
+    [true, /^\d+(\.\d+)?/g, TokenKind.Number],
+    [true, /^\;/g, TokenKind.Semicolon],
+    [true, /^[-][-][-]/g, TokenKind.Seperator],
+    [true, /^[\+\-\*\/\&\|\!\^\<\>\~\=\?]+/g, TokenKind.Op],
+    [true, /^\@+/g, TokenKind.ExprMark],
+    [true, /^[()\[\]{}]/g, TokenKind.Paren],
+    [true, /^["]([\"]|[\\].)*["]/g, TokenKind.Str],
+    [true, /^[']([\']|[\\].)*[']/g, TokenKind.Str],
+    [true, /^[()\[\]{}]/g, TokenKind.Paren],
+    [true, /^[^\s\n\t\r;]+/g, TokenKind.Id],
+    [false, /^(\s|\n|\r|\t)+/g, TokenKind.SpaceNL]
+]);
 /**
- * matchee pair of commandInputTokenized
+ *
+ * # TEST
  */
-let commandTPair : TokenMatcheePair = {matched:[],
-                   remained: commandInputTokenized};
+const inputTxt=
+`import ast;
+---
+122`;

-let tInt = m1TType(tk.TokenType.INT);
-let tFlo = m1TType(tk.TokenType.FLO);
-let tStr = m1TType(tk.TokenType.STR);
-function tBool (x : TokenMatcheePair) :tk.Maybe {
-    let text = x.remained[0].text
-    if (text == "true" || text == "false"){
-        return thenDo(toSome(x), m1TType(tk.TokenType.ID));
-    }else{
-        return {_tag : "None"};
-    }
+const PROG = p.rule();
+const UNIT = p.rule();
+const IMPORTS = p.rule();
+const SEMICOLON = p.rule();
+
+
+let doubleMinus = { type: 'Punctuator', value: '--' };
+let doubleMinus2 = p.str('--');
+const TERM = p.rule();
+
+function applyUnit(value: Token): tkTree{
+    return value.text;
+}
+
+function applySemiColon(value: Token): tkTree{
+    return value.text;
 }
+function applyParts(first: tkTree,
+                    second: [Token, tkTree]):tkTree {
+    return ["%clo", first , second[1]];
+}
+
+
+
+
+function applyImports(input: [Token,Token[], tkTree]):tkTree{
+    let importTail = input[1].map(x=>x.text);
+    return ["import"].concat(importTail);
+};
+
 /**
- * define the right hand side of a grammar
- * eg. `LHS ::= a + b`
- * @param process the right hand side processing : eg. `a + b` in `LHS`
- * @param arrange define the order (0 starting) of the elements of the result.
- * ast. : eg. `a + c` is `1 0 2` `(+ a c)`
- * @returns the processed ast.
+ * PROG : IMPORTS '---' UNIT;
  */
-function gramRHS (process: Function, arrange : number[]){
-    return (m : TokenMatcheePair)=>{
+PROG.setPattern(
+    p.lrec_sc(IMPORTS, p.seq(p.str('---'), UNIT), applyParts)

-    let result : tk.Maybe = process(m);
-    console.log(`result ${result}`)
-    if (result._tag == "None"){
-        return result;
-    }
-    else{
-        let matched = result.value.matched;
-        let return_array : tkTree[] = Array(arrange.length);
+)

-        arrange.forEach((val, index) => {
-            return_array[arrange[index]] = matched[index];
-        });
+/**
+ * PROG : 'import' Id* SEMICOLON;
+ */
+IMPORTS.setPattern(
+    p.apply(p.seq(p.str('import'), p.rep_sc(p.tok(TokenKind.Id)), SEMICOLON) , applyImports)
 );
-        return return_array;
-    }
-    }
-}
+/**
+ * SEMICOLON : ';';
+ */
+SEMICOLON.setPattern(
+    p.apply(p.tok(TokenKind.Semicolon), applySemiColon)
+);
 /**
- * CONST ::= INT | STR | FLO | BOOL
+ * UNIT : Number;
  */
-var constParser = gramRHS((x : TokenMatcheePair)=>
-    {return thenDo(toSome(x),orDo(orDo(orDo(tInt,tFlo),tStr),tBool))}, [0]);
+UNIT.setPattern(
+    p.apply(p.tok(TokenKind.Number), applyUnit)
+);
+
+let tree = p.expectSingleResult(p.expectEOF(PROG.parse(lexer.parse(inputTxt))));
+
+

-let tree = constParser(commandTPair);
-console.log(util.inspect(tree, { showHidden: true, depth: null }));
+console.log("RESULT="+tkTreeToSExp(tree));