added boolean logic on the DSL types :)
This commit is contained in:
parent
3e32551f0a
commit
4c8d17311f
16 changed files with 624 additions and 186 deletions
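The change below lets a wildcard's type annotation be a full boolean expression over node types (||, &&, !, grouping, and an optional trailing *) instead of a flat list. As a quick orientation, here is a minimal sketch, not part of the commit itself, of how the new entry points are exercised; the import paths are assumed from the file layout shown in this diff, and the input strings come from the commit's own test functions:

import { parseInternalAplTo } from "./src/parser/parse";            // path assumed
import { WildcardTokenizer } from "./src/parser/wildcardTokenizer"; // path assumed

// A wildcard can now demand "any Statement that is not a ReturnStatement".
const { prelude, cleanedJS } = parseInternalAplTo(
    "<<SomeIdent: Statement && !ReturnStatement >>"
);
console.dir(prelude, { depth: null }); // one Wildcard whose expr is a BinaryExpr over node types
console.log(cleanedJS);                // "SomeIdent", the wildcard block replaced by its identifier

// The tokenizer can also be exercised on its own, as in the commit's test:
console.log(
    new WildcardTokenizer("aiaiai: ((LOL||!Smack)&&SomethingElse)*").tokenize()
);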
@@ -1,31 +1,32 @@
 proposal DoExpression{
     pair arrowFunction{
         applicable to {
-            "() => {
+            "let <<ident:Identifier>> = () => {
-                <<blockStatements: anyStatementList | hello >>
+                <<statements: NMinus1Statements >>
-                return << returnExpr: Expr >>
+                return <<returnVal : Expression | Identifier>>;
-            }"
+            }
+            "
         }
         transform to {
-            "do {
+            "let <<ident>> = do {
-                << blockStatements >>
+                <<statements>>
-                << returnExpr >>
+                <<returnVal>>
             }"
         }
     }

     pair immediatelyInvokedUnnamedFunction {
         applicable to {
-            "function(){
+            "let <<ident:Identifier>> = function(){
-                <<blockStatements: anyNStatements>>
+                <<statements: NMinus1Statements >>
-                return << returnExpr: Expr >>
+                return <<returnVal : Expression | Identifier>>;
             }();"
         }

         transform to {
-            "do {
+            "let <<ident>> = do {
-                << blockStatements >>
+                <<statements>>
-                << returnExpr >>
+                <<returnVal>>
             }"
         }
     }
@@ -1,15 +1,15 @@
 proposal MultiStmt{
     pair Smthn{
         applicable to{
-            "let <<something:Identifier>> = <<aiai:Identifier | MemberExpression>>();
+            "let <<ident1:Identifier>> = <<funcIdent:Identifier | MemberExpression>>();
-            let <<binaryExprLOL:Identifier>> = 1 + 1;
+            let <<ident2:Identifier>> = <<expr:Expression>>;
             "
         }

         transform to {
-            "() => {
+            "const ident2 = () => {
-                let <<something>> = <<aiai>>();
+                let <<ident1>> = <<funcIdent>>();
-                return <<binaryExprLOL>>;
+                return <<expr>>;
             }"
         }
     }
@@ -1,24 +1,11 @@
 proposal Pipeline{
     pair SingleArgument {
         applicable to {
-            "<<someFunctionIdent:Identifier | MemberExpression>>(<<someFunctionParam: Expression | Identifier>>);"
+            "<<someFunctionIdent:Identifier || MemberExpression>>(<<someFunctionParam: Expression>>);"
         }

         transform to {
             "<<someFunctionParam>> |> <<someFunctionIdent>>(%);"
         }
     }
-
-    pair MultiArgument {
-        applicable to {
-            "<<someFunctionIdent:Identifier>>(
-                <<firstFunctionParam : Expression | Identifier>>,
-                <<restOfFunctionParams: anyRest>>
-            );"
-        }
-
-        transform to {
-            "<<firstFunctionParam>> |> <<someFunctionIdent>>(%, <<restOfFunctionParams>>);"
-        }
-    }
 }
@@ -4,7 +4,7 @@ proposal test_single_stmt{
             "let <<aaaa: Identifier >> = <<bbbb: Expression | Identifier>>"
         }
         transform to {
-            "let <<aaaa>> = () => <<bbbb>>"
+            "let <<aaaa>> = 1 + <<bbbb>>;"
         }
     }
 }
17 grammars/wildcard_grammar.txt Normal file
@@ -0,0 +1,17 @@
+
+Wildcard:
+    Identifier ":" TypeExpr ("*"?)
+TypeExpr:
+    BinaryExpr
+
+BinaryExpr:
+    UnaryExpr { Operator PrimitiveExpr }*
+
+UnaryExpr:
+    {UnaryOperator}? PrimitiveExpr
+
+PrimitiveExpr:
+    GroupExpr | Identifier
+
+GroupExpr:
+    "(" TypeExpr ")"
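For reference, a sketch of the value the new WildcardParser (added in src/parser/parse.ts further down in this diff) should produce for a wildcard written against this grammar; the import path is assumed:

import { Wildcard } from "./src/parser/parse"; // path assumed

// Expected parse of "SomeIdent: Statement && !ReturnStatement"
const example: Wildcard = {
    nodeType: "Wildcard",
    identifier: { nodeType: "Identifier", name: "SomeIdent" },
    expr: {
        nodeType: "BinaryExpr",
        left: { nodeType: "Identifier", name: "Statement" },
        op: "&&",
        right: {
            nodeType: "UnaryExpr",
            op: "!",
            expr: { nodeType: "Identifier", name: "ReturnStatement" },
        },
    },
    star: false,
};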
@@ -2,7 +2,9 @@ function parse() {
     const input = ("input" |> document.getElementById(%)).value;
     const data = 32 |> input.slice(%);
     const compressedData = data |> decode_base64(%);
-    const uncompressed = compressedData |> pako.inflate(%);
+    const uncompressed = pako.inflate(compressedData, {
+        to: "string"
+    });
     const json = uncompressed |> JSON.parse(%);
     json |> console.log(%);
     json |> convertToDesktop(%);
@@ -35,7 +37,9 @@ function convertToDesktop(json) {
         ...mappedValues,
         ...pcSpecificValues
     };
-    const compressed = newData |> JSON.stringify(%) |> pako.deflate(%);
+    const compressed = pako.deflate(newData |> JSON.stringify(%), {
+        to: "string"
+    });
     const base64 = compressed |> btoa(%);
     const finalSaveString = hash + base64;
     ("output_output" |> document.getElementById(%)).innerText = finalSaveString;
@@ -76,7 +80,7 @@ function decode_base64(s) {
         c,
         x,
         l = 0,
-        o = i |> s.substring(%);
+        o = s.substring(i, i + 72);
     for (x = 0; x < o.length; x++) {
         c = e[x |> o.charAt(%)];
         b = (b << 6) + c;
@@ -8,7 +8,6 @@ import {
     transform,
 } from "./transform/transform";

-import { parseDSLtoAST } from "../didactic-chainsaw-dsl/src/JSTQL_interface/fetchAST";
 import { parseJSTQL } from "./langium/langiumRunner";
 const path = "test_files/test.js";
 const file = Bun.file(path);
@ -2,10 +2,11 @@ import * as t from "@babel/types";
|
||||||
|
|
||||||
import * as babelparser from "@babel/parser";
|
import * as babelparser from "@babel/parser";
|
||||||
import { TreeNode, makeTree, showTree } from "../data_structures/tree";
|
import { TreeNode, makeTree, showTree } from "../data_structures/tree";
|
||||||
import { InternalDSLVariable } from "../parser/parse";
|
import { Wildcard } from "../parser/parse";
|
||||||
|
import generate from "@babel/generator";
|
||||||
|
import { WildcardEvalVisitor } from "./wildcardEvaluator";
|
||||||
|
|
||||||
const keys_to_ignore = ["loc", "start", "end", "type"];
|
const keys_to_ignore = ["loc", "start", "end", "type"];
|
||||||
|
|
||||||
export interface MatchedTreeNode {
|
export interface MatchedTreeNode {
|
||||||
aplToNode: TreeNode<t.Node>;
|
aplToNode: TreeNode<t.Node>;
|
||||||
codeNode: TreeNode<t.Node>;
|
codeNode: TreeNode<t.Node>;
|
||||||
|
@ -16,11 +17,21 @@ export interface PairedNodes {
|
||||||
codeNode: t.Node;
|
codeNode: t.Node;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface Match {
|
||||||
|
statements: TreeNode<PairedNodes>[];
|
||||||
|
}
|
||||||
|
|
||||||
|
enum MatchCurrentResult {
|
||||||
|
MatchedWithWildcard,
|
||||||
|
Matched,
|
||||||
|
NoMatch,
|
||||||
|
}
|
||||||
|
|
||||||
export function runMatch(
|
export function runMatch(
|
||||||
code: TreeNode<t.Node>,
|
code: TreeNode<t.Node>,
|
||||||
applicableTo: TreeNode<t.Node>,
|
applicableTo: TreeNode<t.Node>,
|
||||||
internals: InternalDSLVariable
|
internals: Wildcard[]
|
||||||
): TreeNode<PairedNodes>[] {
|
): Match[] {
|
||||||
// Special case for a single expression, we have to remove "ExpressionStatement" node.
|
// Special case for a single expression, we have to remove "ExpressionStatement" node.
|
||||||
if (applicableTo.children.length === 1) {
|
if (applicableTo.children.length === 1) {
|
||||||
if (applicableTo.children[0].element.type === "ExpressionStatement") {
|
if (applicableTo.children[0].element.type === "ExpressionStatement") {
|
||||||
|
@ -42,20 +53,18 @@ export function runMatch(
|
||||||
return matcher.matches;
|
return matcher.matches;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
showTree(code);
|
|
||||||
showTree(applicableTo);
|
|
||||||
|
|
||||||
let matcher = new Matcher(internals, applicableTo.element);
|
let matcher = new Matcher(internals, applicableTo.element);
|
||||||
matcher.multiStatementMatcher(code, applicableTo);
|
matcher.multiStatementMatcher(code, applicableTo);
|
||||||
|
|
||||||
return matcher.matches;
|
return matcher.matches;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export class Matcher {
|
export class Matcher {
|
||||||
public matches: TreeNode<PairedNodes>[];
|
public matches: Match[];
|
||||||
private internals: InternalDSLVariable;
|
private internals: Wildcard[];
|
||||||
private aplToFull: t.Node;
|
private aplToFull: t.Node;
|
||||||
constructor(internals: InternalDSLVariable, aplToFull: t.Node) {
|
constructor(internals: Wildcard[], aplToFull: t.Node) {
|
||||||
this.matches = [];
|
this.matches = [];
|
||||||
this.internals = internals;
|
this.internals = internals;
|
||||||
this.aplToFull = aplToFull;
|
this.aplToFull = aplToFull;
|
||||||
|
@ -77,27 +86,37 @@ export class Matcher {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// Store all full matches
|
// Store all full matches
|
||||||
this.matches.push(...temp);
|
this.matches.push(
|
||||||
|
...temp.map((x) => {
|
||||||
|
return {
|
||||||
|
statements: [x],
|
||||||
|
};
|
||||||
|
})
|
||||||
|
);
|
||||||
}
|
}
|
||||||
// Check if the current matches
|
// Check if the current matches
|
||||||
|
|
||||||
let curMatches = this.checkCodeNode(code.element, aplTo.element);
|
let curMatches = this.checkCodeNode(code.element, aplTo.element);
|
||||||
curMatches =
|
|
||||||
curMatches && code.children.length >= aplTo.children.length;
|
|
||||||
if (!curMatches) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
// At this point current does match
|
|
||||||
// Perform a search on each of the children of both AplTo and Code.
|
|
||||||
let pairedCurrent: TreeNode<PairedNodes> = new TreeNode(null, {
|
let pairedCurrent: TreeNode<PairedNodes> = new TreeNode(null, {
|
||||||
codeNode: code.element,
|
codeNode: code.element,
|
||||||
aplToNode: aplTo.element,
|
aplToNode: aplTo.element,
|
||||||
});
|
});
|
||||||
|
if (curMatches === MatchCurrentResult.NoMatch) {
|
||||||
|
return;
|
||||||
|
} else if (curMatches === MatchCurrentResult.MatchedWithWildcard) {
|
||||||
|
return pairedCurrent;
|
||||||
|
} else if (code.children.length !== aplTo.children.length) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// At this point current does match
|
||||||
|
// Perform a search on each of the children of both AplTo and Code.
|
||||||
|
|
||||||
for (let i = 0; i < aplTo.children.length; i++) {
|
for (let i = 0; i < aplTo.children.length; i++) {
|
||||||
let childSearch = this.singleExprMatcher(
|
let childSearch = this.singleExprMatcher(
|
||||||
code.children[i],
|
code.children[i],
|
||||||
aplTo.children[i]
|
aplTo.children[i]
|
||||||
);
|
);
|
||||||
|
|
||||||
if (childSearch === undefined) {
|
if (childSearch === undefined) {
|
||||||
// Failed to get a full match, so early return here
|
// Failed to get a full match, so early return here
|
||||||
return;
|
return;
|
||||||
|
@ -110,6 +129,43 @@ export class Matcher {
|
||||||
return pairedCurrent;
|
return pairedCurrent;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private checkCodeNode(
|
||||||
|
codeNode: t.Node,
|
||||||
|
aplToNode: t.Node
|
||||||
|
): MatchCurrentResult {
|
||||||
|
// First verify the internal DSL variables
|
||||||
|
|
||||||
|
if (aplToNode.type === "Identifier") {
|
||||||
|
for (let wildcard of this.internals) {
|
||||||
|
if (WildcardEvalVisitor.visit(wildcard.expr, codeNode)) {
|
||||||
|
return MatchCurrentResult.MatchedWithWildcard;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (codeNode.type != aplToNode.type) {
|
||||||
|
return MatchCurrentResult.NoMatch;
|
||||||
|
}
|
||||||
|
|
||||||
|
//If not an internal DSL variable, gotta verify that the identifier is the same
|
||||||
|
if (codeNode.type === "Identifier" && aplToNode.type === "Identifier") {
|
||||||
|
if (codeNode.name != aplToNode.name) {
|
||||||
|
return MatchCurrentResult.NoMatch;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (let key of Object.keys(aplToNode)) {
|
||||||
|
if (key in keys_to_ignore) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!Object.keys(codeNode).includes(key)) {
|
||||||
|
return MatchCurrentResult.NoMatch;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return MatchCurrentResult.Matched;
|
||||||
|
}
|
||||||
|
|
||||||
multiStatementMatcher(code: TreeNode<t.Node>, aplTo: TreeNode<t.Node>) {
|
multiStatementMatcher(code: TreeNode<t.Node>, aplTo: TreeNode<t.Node>) {
|
||||||
if (
|
if (
|
||||||
code.element.type === "Program" ||
|
code.element.type === "Program" ||
|
||||||
|
@ -127,17 +183,18 @@ export class Matcher {
|
||||||
// Sliding window the size of aplTo
|
// Sliding window the size of aplTo
|
||||||
for (let y = 0; y <= code.length - aplTo.length; y++) {
|
for (let y = 0; y <= code.length - aplTo.length; y++) {
|
||||||
let fullMatch = true;
|
let fullMatch = true;
|
||||||
let collection: TreeNode<PairedNodes>[] = [];
|
let statements: TreeNode<PairedNodes>[] = [];
|
||||||
for (let i = 0; i < aplTo.length; i++) {
|
for (let i = 0; i < aplTo.length; i++) {
|
||||||
let res = this.exactExprMatcher(code[i + y], aplTo[i]);
|
let res = this.exactExprMatcher(code[i + y], aplTo[i]);
|
||||||
if (!res) {
|
if (!res) {
|
||||||
fullMatch = false;
|
fullMatch = false;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
collection.push(res);
|
statements.push(res);
|
||||||
}
|
}
|
||||||
if (fullMatch) {
|
if (fullMatch) {
|
||||||
this.matches.push(...collection);
|
console.log(statements.length);
|
||||||
|
this.matches.push({ statements });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -174,42 +231,4 @@ export class Matcher {
|
||||||
|
|
||||||
return paired;
|
return paired;
|
||||||
}
|
}
|
||||||
|
|
||||||
private checkCodeNode(code_node: t.Node, aplTo: t.Node): boolean {
|
|
||||||
// First verify the internal DSL variables
|
|
||||||
|
|
||||||
if (aplTo.type === "Identifier") {
|
|
||||||
if (aplTo.name in this.internals) {
|
|
||||||
if (this.internals[aplTo.name].includes(code_node.type)) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (this.internals[aplTo.name].includes("Expression")) {
|
|
||||||
return t.isExpression(code_node);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (code_node.type != aplTo.type) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
//If not an internal DSL variable, gotta verify that the identifier is the same
|
|
||||||
if (code_node.type === "Identifier" && aplTo.type === "Identifier") {
|
|
||||||
if (code_node.name != aplTo.name) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (let key of Object.keys(aplTo)) {
|
|
||||||
if (key in keys_to_ignore) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!Object.keys(code_node).includes(key)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
42 src/matcher/wildcardEvaluator.ts Normal file
@@ -0,0 +1,42 @@
+import * as t from "@babel/types";
+import {
+    BinaryExpr,
+    GroupExpr,
+    Identifier,
+    TypeExpr,
+    UnaryExpr,
+    Wildcard,
+    WildcardNode,
+} from "../parser/parse";
+
+export class WildcardEvalVisitor {
+    static visit(node: WildcardNode, toComp: t.Node): boolean {
+        switch (node.nodeType) {
+            case "BinaryExpr": {
+                let cur = node as BinaryExpr;
+                let left = this.visit(cur.left, toComp);
+                let right = this.visit(cur.right, toComp);
+                if (cur.op === "&&") {
+                    return left && right;
+                } else {
+                    return left || right;
+                }
+            }
+            case "UnaryExpr": {
+                let cur = node as UnaryExpr;
+                return !this.visit(cur.expr, toComp);
+            }
+            case "GroupExpr": {
+                let cur = node as GroupExpr;
+                return this.visit(cur.expr, toComp);
+            }
+            case "Identifier": {
+                let cur = node as Identifier;
+                if (cur.name === "Expression") {
+                    return t.isExpression(toComp);
+                }
+                return cur.name === toComp.type;
+            }
+        }
+    }
+}
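A sketch of how the matcher is expected to consult this evaluator for a parsed wildcard; the import paths are assumed, and the wildcard string mirrors the DSL examples elsewhere in this commit:

import * as t from "@babel/types";
import { WildcardEvalVisitor } from "./src/matcher/wildcardEvaluator"; // path assumed
import { parseInternalAplTo } from "./src/parser/parse";               // path assumed

const { prelude } = parseInternalAplTo(
    "<<funcIdent: Identifier || MemberExpression >>"
);

// true: an Identifier node satisfies "Identifier || MemberExpression"
console.log(WildcardEvalVisitor.visit(prelude[0].expr, t.identifier("foo")));
// false: a numeric literal is neither an Identifier nor a MemberExpression
console.log(WildcardEvalVisitor.visit(prelude[0].expr, t.numericLiteral(1)));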
@ -1,22 +1,17 @@
|
||||||
import * as babelparser from "@babel/parser";
|
import * as babelparser from "@babel/parser";
|
||||||
|
|
||||||
import * as t from "@babel/types";
|
import * as t from "@babel/types";
|
||||||
|
import { WildcardToken, WildcardTokenizer } from "./wildcardTokenizer";
|
||||||
export interface InternalDSLVariable {
|
|
||||||
[internals: string]: string[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface InternalParseResult {
|
export interface InternalParseResult {
|
||||||
prelude: InternalDSLVariable;
|
prelude: Wildcard[];
|
||||||
cleanedJS: string;
|
cleanedJS: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function parseInternal(code: string): InternalParseResult {
|
export function parseInternalTraTo(code: string): string {
|
||||||
let cleanedJS = "";
|
let cleanedJS = "";
|
||||||
let temp = "";
|
let temp = "";
|
||||||
let flag = false;
|
let flag = false;
|
||||||
let prelude: InternalDSLVariable = {};
|
|
||||||
|
|
||||||
for (let i = 0; i < code.length; i++) {
|
for (let i = 0; i < code.length; i++) {
|
||||||
if (code[i] === "<" && code[i + 1] === "<") {
|
if (code[i] === "<" && code[i + 1] === "<") {
|
||||||
// From now in we are inside of the DSL custom block
|
// From now in we are inside of the DSL custom block
|
||||||
|
@ -28,11 +23,46 @@ export function parseInternal(code: string): InternalParseResult {
|
||||||
if (flag && code[i] === ">" && code[i + 1] === ">") {
|
if (flag && code[i] === ">" && code[i + 1] === ">") {
|
||||||
// We encountered a closing tag
|
// We encountered a closing tag
|
||||||
flag = false;
|
flag = false;
|
||||||
let { identifier, types } = parseInternalString(temp);
|
|
||||||
|
|
||||||
cleanedJS += identifier;
|
cleanedJS += temp;
|
||||||
|
|
||||||
prelude[identifier] = types;
|
i += 1;
|
||||||
|
temp = "";
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (flag) {
|
||||||
|
temp += code[i];
|
||||||
|
} else {
|
||||||
|
cleanedJS += code[i];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return cleanedJS;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function parseInternalAplTo(code: string): InternalParseResult {
|
||||||
|
let cleanedJS = "";
|
||||||
|
let temp = "";
|
||||||
|
let flag = false;
|
||||||
|
let prelude: Wildcard[] = [];
|
||||||
|
for (let i = 0; i < code.length; i++) {
|
||||||
|
if (code[i] === "<" && code[i + 1] === "<") {
|
||||||
|
// From now in we are inside of the DSL custom block
|
||||||
|
flag = true;
|
||||||
|
i += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (flag && code[i] === ">" && code[i + 1] === ">") {
|
||||||
|
// We encountered a closing tag
|
||||||
|
flag = false;
|
||||||
|
let wildcard = new WildcardParser(
|
||||||
|
new WildcardTokenizer(temp).tokenize()
|
||||||
|
).parse();
|
||||||
|
|
||||||
|
cleanedJS += wildcard.identifier.name;
|
||||||
|
|
||||||
|
prelude.push(wildcard);
|
||||||
i += 1;
|
i += 1;
|
||||||
temp = "";
|
temp = "";
|
||||||
continue;
|
continue;
|
||||||
|
@ -47,25 +77,200 @@ export function parseInternal(code: string): InternalParseResult {
|
||||||
return { prelude, cleanedJS };
|
return { prelude, cleanedJS };
|
||||||
}
|
}
|
||||||
|
|
||||||
function parseInternalString(dslString: string): {
|
export interface Identifier extends WildcardNode {
|
||||||
identifier: string;
|
nodeType: "Identifier";
|
||||||
types: string[];
|
name: string;
|
||||||
} {
|
|
||||||
let [identifier, typeString, ..._] = dslString
|
|
||||||
.replace(/\s/g, "")
|
|
||||||
.split(":");
|
|
||||||
|
|
||||||
if (_.length > 0) {
|
|
||||||
// This is an error, and it means we probably have encountered two bitshift operators
|
|
||||||
throw new Error("Probably encountered bitshift");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface Wildcard {
|
||||||
|
nodeType: "Wildcard";
|
||||||
|
identifier: Identifier;
|
||||||
|
expr: TypeExpr;
|
||||||
|
star: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface WildcardNode {
|
||||||
|
nodeType: "BinaryExpr" | "UnaryExpr" | "GroupExpr" | "Identifier";
|
||||||
|
}
|
||||||
|
|
||||||
|
export type TypeExpr = BinaryExpr | UnaryExpr | PrimitiveExpr;
|
||||||
|
|
||||||
|
export type BinaryOperator = "||" | "&&";
|
||||||
|
|
||||||
|
export type UnaryOperator = "!";
|
||||||
|
|
||||||
|
export interface BinaryExpr extends WildcardNode {
|
||||||
|
nodeType: "BinaryExpr";
|
||||||
|
left: UnaryExpr | BinaryExpr | PrimitiveExpr;
|
||||||
|
op: BinaryOperator;
|
||||||
|
right: UnaryExpr | BinaryExpr | PrimitiveExpr;
|
||||||
|
}
|
||||||
|
export interface UnaryExpr extends WildcardNode {
|
||||||
|
nodeType: "UnaryExpr";
|
||||||
|
op: UnaryOperator;
|
||||||
|
expr: PrimitiveExpr;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type PrimitiveExpr = GroupExpr | Identifier;
|
||||||
|
|
||||||
|
export interface GroupExpr extends WildcardNode {
|
||||||
|
nodeType: "GroupExpr";
|
||||||
|
expr: TypeExpr;
|
||||||
|
}
|
||||||
|
|
||||||
|
class WildcardParser {
|
||||||
|
private position = -1;
|
||||||
|
|
||||||
|
constructor(private tokens: WildcardToken[]) {}
|
||||||
|
private getCurrentToken() {
|
||||||
|
// 1. Return the element of array `tokens` at the current position.
|
||||||
|
return this.tokens[this.position];
|
||||||
|
}
|
||||||
|
|
||||||
|
private advance(): void {
|
||||||
|
// 1. Increment the value of `currentPosition` by 1.
|
||||||
|
this.position += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
private peek() {
|
||||||
|
// 1. Return the element of array `tokens` at a position immediately after the current position.
|
||||||
|
return this.tokens[this.position + 1];
|
||||||
|
}
|
||||||
|
|
||||||
|
private error() {
|
||||||
|
return new Error(
|
||||||
|
"Parsing failed at position: " +
|
||||||
|
this.position +
|
||||||
|
". The erroneous input token is: " +
|
||||||
|
this.getCurrentToken().value
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
parse(): Wildcard {
|
||||||
|
return this.Wildcard();
|
||||||
|
}
|
||||||
|
|
||||||
|
private Wildcard(): Wildcard {
|
||||||
|
let identifier = this.Identifier();
|
||||||
|
this.Semicolon();
|
||||||
|
let multidenoted = this.TypeExpr();
|
||||||
|
let star = this.Star();
|
||||||
return {
|
return {
|
||||||
|
nodeType: "Wildcard",
|
||||||
identifier,
|
identifier,
|
||||||
types: typeString ? typeString.split("|") : [""],
|
expr: multidenoted,
|
||||||
|
star,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private Star(): boolean {
|
||||||
|
if (this.peek() && this.peek().tokenKind === "Star") {
|
||||||
|
this.advance();
|
||||||
|
return true;
|
||||||
|
} else {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private TypeExpr(): TypeExpr {
|
||||||
|
if (this.peek().tokenKind === "UnaryOperator") {
|
||||||
|
return this.UnaryExpr();
|
||||||
|
} else {
|
||||||
|
return this.BinaryExpr();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private BinaryExpr(): BinaryExpr | UnaryExpr | PrimitiveExpr {
|
||||||
|
let left: UnaryExpr | BinaryExpr | PrimitiveExpr = this.UnaryExpr();
|
||||||
|
while (this.peek() && this.peek().tokenKind === "BinaryOperator") {
|
||||||
|
let op = this.BinaryOperator();
|
||||||
|
let right = this.UnaryExpr();
|
||||||
|
left = {
|
||||||
|
nodeType: "BinaryExpr",
|
||||||
|
left,
|
||||||
|
op,
|
||||||
|
right,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return left;
|
||||||
|
}
|
||||||
|
|
||||||
|
private BinaryOperator(): BinaryOperator {
|
||||||
|
if (this.peek().tokenKind === "BinaryOperator") {
|
||||||
|
this.advance();
|
||||||
|
return this.getCurrentToken().value as BinaryOperator;
|
||||||
|
} else throw this.error();
|
||||||
|
}
|
||||||
|
|
||||||
|
private UnaryExpr(): UnaryExpr | PrimitiveExpr {
|
||||||
|
if (this.peek().tokenKind === "UnaryOperator") {
|
||||||
|
let UnaryOperator = this.UnaryOperator();
|
||||||
|
let expr = this.PrimitiveExpr();
|
||||||
|
return {
|
||||||
|
nodeType: "UnaryExpr",
|
||||||
|
op: UnaryOperator,
|
||||||
|
expr,
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
return this.PrimitiveExpr();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private UnaryOperator(): UnaryOperator {
|
||||||
|
if (this.peek().tokenKind === "UnaryOperator") {
|
||||||
|
this.advance();
|
||||||
|
return this.getCurrentToken().value as UnaryOperator;
|
||||||
|
} else throw this.error();
|
||||||
|
}
|
||||||
|
|
||||||
|
private PrimitiveExpr(): PrimitiveExpr {
|
||||||
|
if (this.peek().tokenKind === "OpeningParenthesis") {
|
||||||
|
return this.GroupExpr();
|
||||||
|
} else {
|
||||||
|
return this.Identifier();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private GroupExpr(): GroupExpr {
|
||||||
|
this.OpeningParenthesis();
|
||||||
|
let expr = this.TypeExpr();
|
||||||
|
this.ClosingParenthesis();
|
||||||
|
return {
|
||||||
|
nodeType: "GroupExpr",
|
||||||
|
expr,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private OpeningParenthesis() {
|
||||||
|
if (this.peek().tokenKind === "OpeningParenthesis") {
|
||||||
|
this.advance();
|
||||||
|
} else throw this.error();
|
||||||
|
}
|
||||||
|
private ClosingParenthesis() {
|
||||||
|
if (this.peek().tokenKind === "ClosingParenthesis") {
|
||||||
|
this.advance();
|
||||||
|
} else throw this.error();
|
||||||
|
}
|
||||||
|
|
||||||
|
private Semicolon() {
|
||||||
|
if (this.peek().tokenKind === "Semicolon") {
|
||||||
|
this.advance();
|
||||||
|
} else {
|
||||||
|
throw this.error();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
private Identifier(): Identifier {
|
||||||
|
if (this.peek().tokenKind === "Identifier") {
|
||||||
|
this.advance();
|
||||||
|
return {
|
||||||
|
nodeType: "Identifier",
|
||||||
|
name: this.getCurrentToken().value,
|
||||||
|
};
|
||||||
|
} else throw this.error();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
export function parse_with_plugins(
|
export function parse_with_plugins(
|
||||||
code: string
|
code: string
|
||||||
): babelparser.ParseResult<t.File> {
|
): babelparser.ParseResult<t.File> {
|
||||||
|
@ -73,3 +278,21 @@ export function parse_with_plugins(
|
||||||
plugins: [["pipelineOperator", { proposal: "hack", topicToken: "%" }]],
|
plugins: [["pipelineOperator", { proposal: "hack", topicToken: "%" }]],
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function testParser() {
|
||||||
|
console.dir(
|
||||||
|
parseInternalAplTo(
|
||||||
|
"<<someFunctionIdent:Identifier || MemberExpression>>(<<someFunctionParam: Expression || Identifier>>);"
|
||||||
|
),
|
||||||
|
{ depth: null }
|
||||||
|
);
|
||||||
|
|
||||||
|
console.dir(
|
||||||
|
parseInternalAplTo("<<SomeIdent: Statement && !ReturnStatement >>"),
|
||||||
|
{
|
||||||
|
depth: null,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
//testParser();
|
||||||
|
|
132 src/parser/wildcardTokenizer.ts Normal file
@@ -0,0 +1,132 @@
+type TokenKind =
+    | "BinaryOperator"
+    | "UnaryOperator"
+    | "Identifier"
+    | "OpeningParenthesis"
+    | "ClosingParenthesis"
+    | "Star"
+    | "Semicolon";
+
+export interface WildcardToken {
+    tokenKind: TokenKind;
+    value: string;
+}
+
+export class WildcardTokenizer {
+    private tokens: WildcardToken[] = [];
+    private current = -1; // Have to start at -1 because first iteration advances
+    private source: string[];
+    constructor(source: string) {
+        this.source = source.split("");
+    }
+
+    tokenize(): WildcardToken[] {
+        while (this.current < this.source.length - 1) {
+            this.scanToken();
+        }
+        return this.tokens;
+    }
+
+    private peek(): string | undefined {
+        return this.source[this.current + 1];
+    }
+    private getCurrent() {
+        return this.source[this.current];
+    }
+    private advance() {
+        this.current += 1;
+    }
+
+    private consumeToken(tokenKind: TokenKind, value: string) {
+        this.tokens.push({ tokenKind, value });
+    }
+
+    private scanToken() {
+        this.advance();
+        let char = this.getCurrent();
+        switch (char) {
+            case "(": {
+                this.consumeToken("OpeningParenthesis", char);
+                break;
+            }
+            case ")": {
+                this.consumeToken("ClosingParenthesis", char);
+                break;
+            }
+            case "|": {
+                if (this.peek() === "|") {
+                    this.advance();
+                    this.consumeToken("BinaryOperator", "||");
+                } else {
+                    throw new Error(
+                        "Invalid token given to tokenizer: " + char
+                    );
+                }
+                break;
+            }
+            case "!": {
+                this.consumeToken("UnaryOperator", char);
+                break;
+            }
+            case "&": {
+                if (this.peek() === "&") {
+                    this.advance();
+                    this.consumeToken("BinaryOperator", "&&");
+                } else {
+                    throw new Error(
+                        "Invalid token given to tokenizer: " + char
+                    );
+                }
+                break;
+            }
+            case "*": {
+                this.consumeToken("Star", char);
+                break;
+            }
+            case ":": {
+                this.consumeToken("Semicolon", char);
+                break;
+            }
+            case " ":
+                break;
+            default:
+                if (this.isAlpha(char)) {
+                    this.consumeAlpha();
+                    break;
+                } else {
+                    throw new Error("Invalid token given: " + char);
+                }
+        }
+    }
+    private consumeAlpha() {
+        let word = "";
+
+        while (true) {
+            word += this.getCurrent();
+
+            let next = this.peek();
+            if (next && this.isAlpha(next)) {
+                this.advance();
+            } else {
+                break;
+            }
+        }
+
+        this.consumeToken("Identifier", word);
+    }
+    private isAlpha(val: string): boolean {
+        let alphabet = new Set(
+            "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_".split("")
+        );
+        return alphabet.has(val);
+    }
+}
+
+function testWildcardTokenizer() {
+    let tokenized = new WildcardTokenizer(
+        "aiaiai: ((LOL||!Smack)&&SomethingElse)*"
+    ).tokenize();
+
+    console.log(tokenized);
+}
+//testWildcardTokenizer();
@ -4,6 +4,8 @@ import generate from "@babel/generator";
|
||||||
import {
|
import {
|
||||||
InternalDSLVariable,
|
InternalDSLVariable,
|
||||||
parseInternal,
|
parseInternal,
|
||||||
|
parseInternalAplTo,
|
||||||
|
parseInternalTraTo,
|
||||||
parse_with_plugins,
|
parse_with_plugins,
|
||||||
} from "../parser/parse";
|
} from "../parser/parse";
|
||||||
import {
|
import {
|
||||||
|
@ -42,12 +44,10 @@ export function transform(recipe: TransformRecipe, code: string): string {
|
||||||
// We are using JSTQL
|
// We are using JSTQL
|
||||||
// We have to parse JSTQL to the self hosted version
|
// We have to parse JSTQL to the self hosted version
|
||||||
|
|
||||||
let { cleanedJS: applicableTo, prelude } = parseInternal(
|
let { cleanedJS: applicableTo, prelude } = parseInternalAplTo(
|
||||||
recipe.applicableTo
|
recipe.applicableTo
|
||||||
);
|
);
|
||||||
let { cleanedJS: transformTo, prelude: _ } = parseInternal(
|
let transformTo = parseInternalTraTo(recipe.transformTo);
|
||||||
recipe.transformTo
|
|
||||||
);
|
|
||||||
|
|
||||||
return transformSelfHosted(
|
return transformSelfHosted(
|
||||||
{ applicableTo, transformTo },
|
{ applicableTo, transformTo },
|
||||||
|
@ -70,6 +70,7 @@ function transformSelfHosted(
|
||||||
let applicableToTree = makeTree(applicabelToAST);
|
let applicableToTree = makeTree(applicabelToAST);
|
||||||
let transformTo = parse_with_plugins(recipe.transformTo);
|
let transformTo = parse_with_plugins(recipe.transformTo);
|
||||||
let transformToTree = makeTree(transformTo);
|
let transformToTree = makeTree(transformTo);
|
||||||
|
|
||||||
if (
|
if (
|
||||||
codeTree == undefined ||
|
codeTree == undefined ||
|
||||||
applicableToTree == undefined ||
|
applicableToTree == undefined ||
|
||||||
|
@ -78,23 +79,14 @@ function transformSelfHosted(
|
||||||
throw new Error("This no worky LOL");
|
throw new Error("This no worky LOL");
|
||||||
}
|
}
|
||||||
showTree(applicableToTree);
|
showTree(applicableToTree);
|
||||||
|
console.log(generate(codeAST));
|
||||||
|
|
||||||
let matches = runMatch(codeTree, applicableToTree, internals);
|
let matches = runMatch(codeTree, applicableToTree, internals);
|
||||||
|
|
||||||
console.log(matches.length);
|
console.log(matches.length);
|
||||||
for (let match of matches.reverse()) {
|
|
||||||
//console.log(transformToTree.element);
|
|
||||||
// There is a bug here, for some reason it works sometimes when Program and sometimes when File, no clue why?????
|
|
||||||
let output = parse_with_plugins(recipe.transformTo).program;
|
|
||||||
try {
|
|
||||||
transformer(match, transformToTree, output, codeAST);
|
|
||||||
} catch (error) {
|
|
||||||
console.log(error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
console.log("Final generated code: \n");
|
|
||||||
|
|
||||||
let output = generate(codeAST, { topicToken: "%" }).code;
|
let outputAST = transformer(matches, transformToTree, codeAST, transformTo);
|
||||||
|
|
||||||
|
let output = generate(outputAST, { topicToken: "%" }).code;
|
||||||
//showTree(transformToTree);
|
//showTree(transformToTree);
|
||||||
return output;
|
return output;
|
||||||
}
|
}
|
||||||
|
|
|
@ -8,30 +8,56 @@ import {
|
||||||
showTree,
|
showTree,
|
||||||
showTreePaired,
|
showTreePaired,
|
||||||
} from "../data_structures/tree";
|
} from "../data_structures/tree";
|
||||||
import { InternalDSLVariable } from "../parser/parse";
|
import { Match, MatchedTreeNode, PairedNodes } from "../matcher/matcher";
|
||||||
import { MatchedTreeNode, PairedNodes } from "../matcher/matcher";
|
|
||||||
import traverse from "@babel/traverse";
|
import traverse from "@babel/traverse";
|
||||||
import generate from "@babel/generator";
|
import generate from "@babel/generator";
|
||||||
|
import { TransformRecipe } from "./transform";
|
||||||
|
|
||||||
export function transformer(
|
export function transformer(
|
||||||
match: TreeNode<PairedNodes>,
|
matches: Match[],
|
||||||
trnTo: TreeNode<t.Node>,
|
transformTo: TreeNode<t.Node>,
|
||||||
output: t.Node,
|
codeAST: t.Node,
|
||||||
inputCode: t.Node
|
traToAST: t.File
|
||||||
) {
|
): t.Node {
|
||||||
transformMatch(match, trnTo, output);
|
for (let match of matches.reverse()) {
|
||||||
|
try {
|
||||||
if (output.type == "Program") {
|
let traToWithWildcards = structuredClone(traToAST);
|
||||||
output = output.body[0];
|
for (let match_stmt of match.statements) {
|
||||||
|
transformMatch(match_stmt, transformTo, traToWithWildcards);
|
||||||
}
|
}
|
||||||
|
traverse(codeAST, {
|
||||||
traverse(inputCode, {
|
|
||||||
enter(path) {
|
enter(path) {
|
||||||
if (path.node === match.element.codeNode) {
|
if (
|
||||||
path.replaceWith(output);
|
!(
|
||||||
|
path.node.type === "Program" ||
|
||||||
|
path.node.type === "File"
|
||||||
|
)
|
||||||
|
) {
|
||||||
|
if (
|
||||||
|
path.node === match.statements[0].element.codeNode
|
||||||
|
) {
|
||||||
|
path.replaceWithMultiple(
|
||||||
|
traToWithWildcards.program.body
|
||||||
|
);
|
||||||
|
let siblings = path.getAllNextSiblings();
|
||||||
|
|
||||||
|
for (
|
||||||
|
let i = 0;
|
||||||
|
i < match.statements.length - 1;
|
||||||
|
i++
|
||||||
|
) {
|
||||||
|
siblings[i].remove();
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
} catch (e) {
|
||||||
|
console.log(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return codeAST;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function transformMatch(
|
export function transformMatch(
|
||||||
|
@ -39,23 +65,18 @@ export function transformMatch(
|
||||||
trnTo: TreeNode<t.Node>,
|
trnTo: TreeNode<t.Node>,
|
||||||
output: t.Node
|
output: t.Node
|
||||||
) {
|
) {
|
||||||
if (trnTo.element.type == "Program") {
|
let isMatchingIdentifier = matchNode(
|
||||||
return transformMatch(match, trnTo.children[0], output);
|
match.element.aplToNode,
|
||||||
}
|
trnTo.element
|
||||||
|
);
|
||||||
let isMatch = matchNode(match.element.aplToNode, trnTo.element);
|
if (isMatchingIdentifier) {
|
||||||
if (isMatch) {
|
|
||||||
if (trnTo.element.type == "Identifier") {
|
|
||||||
traverse(output, {
|
traverse(output, {
|
||||||
enter(path) {
|
Identifier: (path) => {
|
||||||
if (path.isIdentifier({ name: trnTo.element.name })) {
|
if (path.node.name === (<t.Identifier>trnTo.element).name) {
|
||||||
if (match.element.codeNode) {
|
path.replaceWithMultiple(match.element.codeNode);
|
||||||
path.replaceWith(match.element.codeNode);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
for (let match_child of match.children) {
|
for (let match_child of match.children) {
|
||||||
transformMatch(match_child, trnTo, output);
|
transformMatch(match_child, trnTo, output);
|
||||||
|
@ -70,18 +91,7 @@ function matchNode(aplTo: t.Node, trnTo: t.Node): boolean {
|
||||||
//console.log(trnTo);
|
//console.log(trnTo);
|
||||||
|
|
||||||
if (trnTo.type == "Identifier" && aplTo.type == "Identifier") {
|
if (trnTo.type == "Identifier" && aplTo.type == "Identifier") {
|
||||||
let aplToName = washName(aplTo.name);
|
return aplTo.name === trnTo.name;
|
||||||
let trnToName = trnTo.name;
|
|
||||||
if (aplToName == trnToName) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
} else if (trnTo.type == "Identifier" && aplTo.type == "Identifier") {
|
|
||||||
let aplToName = washName(aplTo.name);
|
|
||||||
let trnToName = trnTo.name;
|
|
||||||
|
|
||||||
if (aplToName == trnToName) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
0 test_files/do_test.js Normal file
@@ -1,2 +1,7 @@
-let ThisTest = LOOOOOOOOL();
+let a = LOOOOOOOOL();
-let HAHHAHAH = 1 + 1;
+let b = (999 * 128) / 12;
+
+const haha = () => {
+    let a = LOOOOOOOOL();
+    let b = (999 * 128) / 12;
+};
@@ -1 +1,8 @@
 let something = 1 + 1;
+let yikers = hahahah;
+
+let lol = () => 100 + 100;
+
+function haha() {
+    let fhdsjkfhdsjkfhds = fjhdkslfjhdsklfjdskl;
+}