return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.m1TType = void 0;
+exports.tkTreeToSExp = void 0;
var fs = require('fs');
-const node_process_1 = require("node:process");
-const tk = __importStar(require("./tokenize.js"));
const util = __importStar(require("util"));
+const p = __importStar(require("typescript-parsec"));
/**
- * debug reprensenting
- */
-let repr = (x) => { return util.inspect(x, { depth: null }); };
-/**
- * concated 2 `tkTree`s
- * @param x the array to be concated
- * @param y the item or array to ve concated
- * @returns concated tkTree array, or thrown error if can't be concated.
- */
-function concat(x, y) {
- if (Array.isArray(x)) {
- return x.concat(y);
- }
- else {
- throw new Error("the tkTree can't be concated, because it's not an array.");
- }
-}
-function slice(x, index, end) {
- if (Array.isArray(x)) {
- return x.slice(index, end);
- }
- else {
- throw new Error("the tkTree can't be concated, because it's not an array.");
- }
-}
-/**
- * @description
- * match one token type.
*
- * it returns a function which test if the type of first token of the `remained` part of
- * the argument of the function is `typ` , if it's true, update the `TokenMatcheePair` wrapped
- * in `Some`. Otherwise, it returns `None`.
- * * @param typ : the type to be test.
- * @returns the updated `TokenMatcheePair` wrapped in `Some(x)` or `None`.
+ * # REPRESENTATION
*/
-function m1TType(typ) {
- return (m) => {
- if (m.remained.length == 0) {
- return { _tag: "None" };
- }
- /**
- * token to be matched
- * */
- const ttbm = m.remained[0];
- if (ttbm.type == typ) {
- let new_matched = m.matched.concat(ttbm);
- let result = {
- _tag: "Some", value: {
- matched: new_matched,
- remained: m.remained.slice(1),
- ast: ([ttbm]),
- }
- };
- return result;
- }
- else {
- return { _tag: "None" };
- }
- };
-}
-exports.m1TType = m1TType;
-;
/**
- * type int
+ * Convert a `tkTree` AST into an S-expression string.
+ * @param t the `tkTree` (a string leaf or a nested array)
+ * @returns the S-expression string
*/
-let tInt = m1TType(tk.TokenType.INT);
-let tAdd = m1TType(tk.TokenType.I_ADD);
-let tMul = m1TType(tk.TokenType.I_MUL);
-node_process_1.argv.forEach((val, index) => {
- console.log(`${index}=${val}`);
-});
-/**
- * like `m ==> f` in ocaml
- * @param m matchee wrapped
- * @param f matching function
- * @returns wrapped result
- */
-function thenDo(m, f) {
- if (m._tag == "None") {
- return m;
+function tkTreeToSExp(t) {
+ var str = "";
+ if (Array.isArray(t)) {
+ let strArray = t.map((x) => tkTreeToSExp(x));
+ str = "(" + strArray.join(" ") + ")";
}
else {
- var a = f(m.value);
- if (a._tag == "Some") {
- a.value.ast = concat(m.value.ast, a.value.ast);
- }
- return a;
- }
-}
-/**
- * like `f1 | f2` in regex
- * @param f1 the first tried function
- * @param f2 the second tried function
- * @returns wrapped result
- */
-function orDo(f1, f2) {
- return (x) => {
- let res1 = f1(x);
- if (res1._tag == "Some") {
- return res1;
+ if (t === undefined) {
+ str = "%undefined";
}
else {
- let res2 = f2(x);
- return res2;
+ str = t;
}
- };
-}
-let midfix = (f, signal) => (x) => {
- var a = f(x);
- if (a._tag == "Some") {
- let ast_head = slice(a.value.ast, 0, a.value.ast.length - 3);
- let ast_tail = slice(a.value.ast, a.value.ast.length - 3);
- let new_ast = [ast_tail];
- a.value.ast = new_ast;
- console.log("+" + signal + "+" + repr(a));
}
- return a;
-};
-/**
- *
- * fac1 = int MUL int
- */
-//let fac1 = midfix((x : TokenMatcheePair)=>
-// thenDo(thenDo(thenDo(tk.toSome(x), tInt), tMul), tInt));
-let fac1 = (x) => {
- let a = midfix((x) => thenDo(thenDo(thenDo(tk.toSome(x), tInt), tMul), tInt), "fac1")(x);
- return a;
-};
-/**
- *
- * fac2 = int MUL int
- */
-let fac2 = tInt;
+ return str;
+}
+exports.tkTreeToSExp = tkTreeToSExp;
+/** Deeply inspect a value's internal structure for debugging (no depth limit). */
+let repr = (x) => { return util.inspect(x, { depth: null }); };
+var TokenKind;
+(function (TokenKind) {
+ TokenKind[TokenKind["Seperator"] = 0] = "Seperator";
+ TokenKind[TokenKind["Semicolon"] = 1] = "Semicolon";
+ TokenKind[TokenKind["Number"] = 2] = "Number";
+ TokenKind[TokenKind["Op"] = 3] = "Op";
+ TokenKind[TokenKind["ExprMark"] = 4] = "ExprMark";
+ TokenKind[TokenKind["Paren"] = 5] = "Paren";
+ TokenKind[TokenKind["SpaceNL"] = 6] = "SpaceNL";
+ TokenKind[TokenKind["Id"] = 7] = "Id";
+ TokenKind[TokenKind["Str"] = 8] = "Str";
+})(TokenKind || (TokenKind = {}));
/**
- * fac = fac1 | fac2
+ * Parsing
*/
-let fac = orDo(fac1, fac2);
+const lexer = p.buildLexer([
+ [true, /^\d+(\.\d+)?/g, TokenKind.Number],
+ [true, /^\;/g, TokenKind.Semicolon],
+ [true, /^[-][-][-]/g, TokenKind.Seperator],
+ [true, /^[\+\-\*\/\&\|\!\^\<\>\~\=\?]+/g, TokenKind.Op],
+ [true, /^\@+/g, TokenKind.ExprMark],
+ [true, /^[()\[\]{}]/g, TokenKind.Paren],
+ [true, /^["]([\"]|[\\].)*["]/g, TokenKind.Str],
+ [true, /^[']([\']|[\\].)*[']/g, TokenKind.Str],
+ [true, /^[()\[\]{}]/g, TokenKind.Paren],
+ [true, /^[^\s\n\t\r;]+/g, TokenKind.Id],
+ [false, /^(\s|\n|\r|\t)+/g, TokenKind.SpaceNL]
+]);
/**
*
- * expr1 = fac ADD fac
+ * # TEST
*/
-let expr1 = midfix((x) => thenDo(thenDo(thenDo(tk.toSome(x), fac), tAdd), fac), "expr1");
-/**
- * expr2 = fac
- */
-let expr2 = fac;
-/**
- * expr = expr1 | expr2
- */
-let expr = orDo(expr1, expr2);
-let tokens = tk.tokenize("2+3"); //tk.tokenize(argv[2]);
-let tokensFiltered = tokens.filter((x) => {
- return (x.type != tk.TokenType.NL
- && x.type != tk.TokenType.SP);
-});
-let wrappedTokens = tk.toSome({
- matched: [],
- remained: tokensFiltered,
- ast: []
-});
-let beta = expr({
- matched: [],
- remained: tokensFiltered,
- ast: []
-});
-console.log(repr(wrappedTokens));
-console.log(repr(beta));
+const inputTxt = `import ast;
+---
+122`;
+const PROG = p.rule();
+const UNIT = p.rule();
+const IMPORTS = p.rule();
+const SEMICOLON = p.rule();
+let doubleMinus = { type: 'Punctuator', value: '--' };
+let doubleMinus2 = p.str('--');
+const TERM = p.rule();
+function applyUnit(value) {
+ return value.text;
+}
+function applySemiColon(value) {
+ return value.text;
+}
+function applyParts(first, second) {
+ return ["%clo", first, second[1]];
+}
+PROG.setPattern(p.lrec_sc(IMPORTS, p.seq(p.str('---'), UNIT), applyParts));
+function applyImports(input) {
+ let importTail = input[1].map(x => x.text);
+ return ["import"].concat(importTail);
+}
+;
+IMPORTS.setPattern(p.apply(p.seq(p.str('import'), p.rep_sc(p.tok(TokenKind.Id)), SEMICOLON), applyImports));
+SEMICOLON.setPattern(p.apply(p.tok(TokenKind.Semicolon), applySemiColon));
+UNIT.setPattern(p.apply(p.tok(TokenKind.Number), applyUnit));
+let tree = p.expectSingleResult(p.expectEOF(PROG.parse(lexer.parse(inputTxt))));
+console.log("RESULT=" + tkTreeToSExp(tree));