]>
git.kianting.info Git - clo/blob - src/index.ts
26a690eb3b71659a119668c7e4c5f202faf04a39
1 var fs
= require('fs');
2 import jsTokens from
"js-tokens";
3 import * as util from
'util';
4 import * as p from
'typescript-parsec';
5 import { Token
} from
'typescript-parsec';
6 import { TokenType
} from
"./tokenize";
// NOTE(review): scrape residue — original lines 15, 17-18 and 22-32 are elided
// in this view; the non-array branch and the declaration of `str` are not
// visible, so the code below is kept byte-identical; comments only.
12 * convert a `tkTree` AST to S-expr string
13 * @param t the `tkTree`
14 * @returns S-expr String
16 export function tkTreeToSExp(t
: tkTree
): string{
// Array case: recursively stringify each child, then join as "(a b c)".
19 if (Array.isArray(t
)){
20 let strArray
= t
.map((x
)=>tkTreeToSExp(x
));
21 str
= "(" + strArray
.join(" ") + ")";
33 /**inspect the inner of the representation. */
34 let repr
= (x
: any)=>{return util
.inspect(x
, {depth
: null})};
/** AST node: a leaf token string, or a nested list of nodes (S-expression tree). */
41 type tkTree
= string | tkTree
[];
60 const lexer
= p
.buildLexer([
61 [true, /^\d
+(\
.\d
+)?/g
, TokenKind
.Number],
62 [true, /^\\\@
/g
, TokenKind
.ExcapeAt
],
63 [true, /^\
/\
*([^/]|\
/[^*])*\
*\
//g, TokenKind.Comment],
64 [true, /^\
;/g
, TokenKind
.Semicolon
],
65 [true, /^[-][-][-]/g
, TokenKind
.Seperator
],
66 [true, /^[\
+\
-\
*\
/\
&\
|\
!\
^\
<\
>\
~\
=\?]+/g
, TokenKind
.Op
],
67 [true, /^\@
/g
, TokenKind
.ExprMark
],
68 [true, /^[()\
[\
]{}]/g
, TokenKind
.Paren
],
69 [true, /^["]([\"]|[\\].)*["]/g
, TokenKind
.Str
],
70 [true, /^[']([\']|[\\].)*[']/g
, TokenKind
.Str
],
71 [true, /^[()\
[\
]{}]/g
, TokenKind
.Paren
],
72 [true, /^[^\
/\\\@\s
\n\t\r;]+/g
, TokenKind
.Id
],
73 [true, /^(\s
|\n|\r|\t)+/g
, TokenKind
.SpaceNL
],
82 `import a as b; /*bacourt*/
100 const PROG
= p
.rule
<TokenKind
, tkTree
>();
101 const SEGMENT
= p
.rule
<TokenKind
, tkTree
>();
102 const IMPORT
= p
.rule
<TokenKind
, tkTree
>();
103 const IMPORTS
= p
.rule
<TokenKind
, tkTree
>();
104 const SEMICOLON
= p
.rule
<TokenKind
, tkTree
>();
105 const EXCAPE_AT
= p
.rule
<TokenKind
, tkTree
>();
106 const NOT_AT_TEXT
= p
.rule
<TokenKind
, tkTree
>();
107 const CONTENT
= p
.rule
<TokenKind
, tkTree
>();
109 let doubleMinus
= { type: 'Punctuator', value
: '--' };
110 let doubleMinus2
= p
.str('--');
111 const TERM
= p
.rule
<TokenKind
, tkTree
>();
113 function applySegment(input
: [Token
<TokenKind
>, Token
<TokenKind
>[],
114 Token
<TokenKind
>]): tkTree
[]{
115 let unpackedInnerExprs
= input
[1].map((x
)=>{return x
.text
});
116 return ["%exprs", unpackedInnerExprs
];
// Parser action for a Semicolon token.
// NOTE(review): the function body (original lines 120-121) is elided in this
// scrape — presumably it returns `value.text`; confirm against the repository.
119 function applySemiColon(value
: Token
<TokenKind
.Semicolon
>): tkTree
{
123 function applyParts(first
: tkTree
,
124 second
: [Token
<TokenKind
>, tkTree
]):tkTree
{
125 return ["%clo", first
, second
[1]];
// Parser action for a Comment token.
// NOTE(review): the function body (original lines 130-131) is elided in this
// scrape — likely wraps `value.text` in a comment node; confirm upstream.
129 function applyComment(value
: Token
<TokenKind
.Comment
>): tkTree
[]{
134 function applyImport(input
: [Token
<TokenKind
>,Token
<TokenKind
>[], tkTree
]) : tkTree
{
135 let importTail
= input
[1].map(x
=>x
.text
);
136 return ["import"].concat(importTail
);
141 function applyImportComment(input: [Token<TokenKind>,Token<TokenKind>[],
142 tkTree, Token<TokenKind.Comment>]) : tkTree{
143 let importTail = input[1].map(x=>x.text);
144 let comment = [input[3].text];
145 return ["import"].concat(importTail).concat(comment);
148 function applyImports(input
: [tkTree
, tkTree
[]]): tkTree
{
149 let resultBody
= [input
[0]].concat(input
[1]);
150 let resultWrapper
= ["%import", resultBody
];
151 return resultWrapper
;
// Map a non-'@' token to plain text; the escaped-at form gets special handling.
// NOTE(review): the then-branch body (original lines 159-160) is elided in this
// scrape — presumably it returns a literal "@"; confirm against the repository.
157 function applyNotAtText(value
: Token
<TokenKind
>): tkTree
{
158 if (value
.text
== "\\\@"){
// Any other token passes through as its raw text.
161 else{return value
.text
;}
164 function applyText (input
: tkTree
): tkTree
[]{
165 return ["%text", input
];
168 function applyContent(input
: tkTree
[]): tkTree
[]{
169 return ["%content", input
];
// Parser action for a whitespace/newline token.
// NOTE(review): the function body (original lines 173-174) is elided in this
// scrape — likely returns `value.text`; confirm against the repository.
172 function applySpaceNL(value
: Token
<TokenKind
.SpaceNL
>): tkTree
{
// IMPORTEE: token kinds allowed inside an import clause.
// NOTE(review): original lines 180 and 182 (presumably the Op and Id
// alternatives named in the comment) are elided in this scrape; the code is
// kept byte-identical.
177 * IMPORTEE: Number, Op, Paren, Id, Str, Comment,
179 let IMPORTEE
= p
.alt(p
.tok(TokenKind
.Number),
181 p
.tok(TokenKind
.Paren
),
183 p
.tok(TokenKind
.Str
),
184 p
.tok(TokenKind
.SpaceNL
),
185 p
.tok(TokenKind
.Comment
));
// NOT_AT: any token other than the '@' expression mark.
// NOTE(review): original lines 191, 194 and the closing of the alt(...) call
// are elided in this scrape; the code is kept byte-identical.
187 let NOT_AT
= p
.alt(p
.tok(TokenKind
.Seperator
),
188 p
.tok(TokenKind
.Semicolon
),
189 p
.tok(TokenKind
.Number),
190 p
.tok(TokenKind
.ExcapeAt
),
192 p
.tok(TokenKind
.Paren
),
193 p
.tok(TokenKind
.SpaceNL
),
195 p
.tok(TokenKind
.Str
),
196 p
.tok(TokenKind
.Comment
),
// NOTE(review): scrape residue — this region is a diff view with many original
// lines elided (several `setPattern(` openers and closers, plus grammar
// comments are missing), so the code below is kept byte-identical.
200 * PROG : IMPORTS '---' CONTENT;
// PROG: left-recursive fold of IMPORTS with ('---' CONTENT) via applyParts.
203 p
.lrec_sc(IMPORTS
, p
.seq(p
.str('---'), CONTENT
), applyParts
)
208 * NOT_AT_TEXT : NOT_AT
// NOT_AT_TEXT: a single non-'@' token mapped to its text by applyNotAtText.
210 NOT_AT_TEXT
.setPattern(
211 p
.apply(NOT_AT
, applyNotAtText
)
// IMPORTS: one IMPORT followed by any number more, wrapped by applyImports.
215 p
.apply( p
.seq(IMPORT
, p
.rep(IMPORT
)), applyImports
)
220 * 'import' IMPORTEE* SEMICOLON |
// IMPORT alternatives: an 'import' clause, a comment, or whitespace.
225 p
.apply(p
.seq(p
.str('import'), p
.rep_sc(IMPORTEE
), SEMICOLON
),
227 p
.apply(p
.tok(TokenKind
.Comment
), applyComment
),
228 p
.apply(p
.tok(TokenKind
.SpaceNL
), applySpaceNL
)
// SEMICOLON: a single Semicolon token via applySemiColon.
236 SEMICOLON
.setPattern(
237 p
.apply(p
.tok(TokenKind
.Semicolon
), applySemiColon
)
243 * SEGMENT : '@' NOT_AT* '@' |
244 * (NOT_AT_TEXT | EXCAPE_AT)*
// SEGMENT alternatives: a plain text run, or an '@'-delimited expression.
248 p
.apply(p
.rep_sc(NOT_AT_TEXT
), applyText
),
249 p
.apply(p
.seq(p
.str('@'), p
.rep(NOT_AT
), p
.str('@')), applySegment
),
// CONTENT: repeated SEGMENTs wrapped by applyContent.
257 p
.apply(p
.rep(SEGMENT
), applyContent
)
// Parse the input and require exactly one complete parse (EOF reached).
// NOTE(review): original lines 262-264 are elided; `inputTxt` is defined
// outside this fragment — presumably the source text to parse. TODO confirm.
261 let tree
= p
.expectSingleResult(p
.expectEOF(PROG
.parse(lexer
.parse(inputTxt
))));
// Print the resulting AST as an S-expression.
265 console
.log("RESULT="+tkTreeToSExp(tree
));