added boolean logic on the DSL types :)
Parent: b1a3ece8f1
Commit: 7f3580f15a
16 changed files with 624 additions and 186 deletions
@@ -1,31 +1,32 @@
proposal DoExpression{
pair arrowFunction{
applicable to {
"() => {
<<blockStatements: anyStatementList | hello >>
return << returnExpr: Expr >>
}"
"let <<ident:Identifier>> = () => {
<<statements: NMinus1Statements >>
return <<returnVal : Expression | Identifier>>;
}
"
}
transform to {
"do {
<< blockStatements >>
<< returnExpr >>
"let <<ident>> = do {
<<statements>>
<<returnVal>>
}"
}
}

pair immediatelyInvokedUnnamedFunction {
applicable to {
"function(){
<<blockStatements: anyNStatements>>
return << returnExpr: Expr >>
"let <<ident:Identifier>> = function(){
<<statements: NMinus1Statements >>
return <<returnVal : Expression | Identifier>>;
}();"
}

transform to {
"do {
<< blockStatements >>
<< returnExpr >>
"let <<ident>> = do {
<<statements>>
<<returnVal>>
}"
}
}

@@ -1,15 +1,15 @@
proposal MultiStmt{
pair Smthn{
applicable to{
"let <<something:Identifier>> = <<aiai:Identifier | MemberExpression>>();
let <<binaryExprLOL:Identifier>> = 1 + 1;
"let <<ident1:Identifier>> = <<funcIdent:Identifier | MemberExpression>>();
let <<ident2:Identifier>> = <<expr:Expression>>;
"
}

transform to {
"() => {
let <<something>> = <<aiai>>();
return <<binaryExprLOL>>;
"const ident2 = () => {
let <<ident1>> = <<funcIdent>>();
return <<expr>>;
}"
}
}

@@ -1,24 +1,11 @@
proposal Pipeline{
pair SingleArgument {
applicable to {
"<<someFunctionIdent:Identifier | MemberExpression>>(<<someFunctionParam: Expression | Identifier>>);"
"<<someFunctionIdent:Identifier || MemberExpression>>(<<someFunctionParam: Expression>>);"
}

transform to {
"<<someFunctionParam>> |> <<someFunctionIdent>>(%);"
}
}

pair MultiArgument {
applicable to {
"<<someFunctionIdent:Identifier>>(
<<firstFunctionParam : Expression | Identifier>>,
<<restOfFunctionParams: anyRest>>
);"
}

transform to {
"<<firstFunctionParam>> |> <<someFunctionIdent>>(%, <<restOfFunctionParams>>);"
}
}
}

@@ -4,7 +4,7 @@ proposal test_single_stmt{
"let <<aaaa: Identifier >> = <<bbbb: Expression | Identifier>>"
}
transform to {
"let <<aaaa>> = () => <<bbbb>>"
"let <<aaaa>> = 1 + <<bbbb>>;"
}
}
}
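The wildcard grammar introduced below swaps the single-bar alternation used in these type annotations for boolean operators (||, &&, !) with optional grouping; the SingleArgument pair above shows the change side by side:

<<someFunctionIdent:Identifier | MemberExpression>>    (old alternation syntax)
<<someFunctionIdent:Identifier || MemberExpression>>   (new boolean syntax)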
grammars/wildcard_grammar.txt (new file, 17 lines)
@@ -0,0 +1,17 @@
Wildcard:
    Identifier ":" TypeExpr ("*"?)

TypeExpr:
    BinaryExpr

BinaryExpr:
    UnaryExpr { Operator PrimitiveExpr }*

UnaryExpr:
    {UnaryOperator}? PrimitiveExpr

PrimitiveExpr:
    GroupExpr | Identifier

GroupExpr:
    "(" TypeExpr ")"
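For instance, wildcard strings like the following (taken from the test helpers later in this commit) conform to the grammar:

someFunctionIdent: Identifier || MemberExpression
SomeIdent: Statement && !ReturnStatement
aiaiai: ((LOL||!Smack)&&SomethingElse)*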
@@ -2,7 +2,9 @@ function parse() {
const input = ("input" |> document.getElementById(%)).value;
const data = 32 |> input.slice(%);
const compressedData = data |> decode_base64(%);
const uncompressed = compressedData |> pako.inflate(%);
const uncompressed = pako.inflate(compressedData, {
to: "string"
});
const json = uncompressed |> JSON.parse(%);
json |> console.log(%);
json |> convertToDesktop(%);

@@ -35,7 +37,9 @@ function convertToDesktop(json) {
...mappedValues,
...pcSpecificValues
};
const compressed = newData |> JSON.stringify(%) |> pako.deflate(%);
const compressed = pako.deflate(newData |> JSON.stringify(%), {
to: "string"
});
const base64 = compressed |> btoa(%);
const finalSaveString = hash + base64;
("output_output" |> document.getElementById(%)).innerText = finalSaveString;

@@ -76,7 +80,7 @@ function decode_base64(s) {
c,
x,
l = 0,
o = i |> s.substring(%);
o = s.substring(i, i + 72);
for (x = 0; x < o.length; x++) {
c = e[x |> o.charAt(%)];
b = (b << 6) + c;
@@ -8,7 +8,6 @@ import {
transform,
} from "./transform/transform";

import { parseDSLtoAST } from "../didactic-chainsaw-dsl/src/JSTQL_interface/fetchAST";
import { parseJSTQL } from "./langium/langiumRunner";
const path = "test_files/test.js";
const file = Bun.file(path);
@@ -2,10 +2,11 @@ import * as t from "@babel/types";

import * as babelparser from "@babel/parser";
import { TreeNode, makeTree, showTree } from "../data_structures/tree";
import { InternalDSLVariable } from "../parser/parse";
import { Wildcard } from "../parser/parse";
import generate from "@babel/generator";
import { WildcardEvalVisitor } from "./wildcardEvaluator";

const keys_to_ignore = ["loc", "start", "end", "type"];

export interface MatchedTreeNode {
aplToNode: TreeNode<t.Node>;
codeNode: TreeNode<t.Node>;

@@ -16,11 +17,21 @@ export interface PairedNodes {
codeNode: t.Node;
}

export interface Match {
statements: TreeNode<PairedNodes>[];
}

enum MatchCurrentResult {
MatchedWithWildcard,
Matched,
NoMatch,
}

export function runMatch(
code: TreeNode<t.Node>,
applicableTo: TreeNode<t.Node>,
internals: InternalDSLVariable
): TreeNode<PairedNodes>[] {
internals: Wildcard[]
): Match[] {
// Special case for a single expression, we have to remove "ExpressionStatement" node.
if (applicableTo.children.length === 1) {
if (applicableTo.children[0].element.type === "ExpressionStatement") {

@@ -42,20 +53,18 @@ export function runMatch(
return matcher.matches;
}
} else {
showTree(code);
showTree(applicableTo);

let matcher = new Matcher(internals, applicableTo.element);
matcher.multiStatementMatcher(code, applicableTo);

return matcher.matches;
}
}

export class Matcher {
public matches: TreeNode<PairedNodes>[];
private internals: InternalDSLVariable;
public matches: Match[];
private internals: Wildcard[];
private aplToFull: t.Node;
constructor(internals: InternalDSLVariable, aplToFull: t.Node) {
constructor(internals: Wildcard[], aplToFull: t.Node) {
this.matches = [];
this.internals = internals;
this.aplToFull = aplToFull;

@@ -77,27 +86,37 @@ export class Matcher {
}
}
// Store all full matches
this.matches.push(...temp);
this.matches.push(
...temp.map((x) => {
return {
statements: [x],
};
})
);
}
// Check if the current matches

let curMatches = this.checkCodeNode(code.element, aplTo.element);
curMatches =
curMatches && code.children.length >= aplTo.children.length;
if (!curMatches) {
return;
}
// At this point current does match
// Perform a search on each of the children of both AplTo and Code.
let pairedCurrent: TreeNode<PairedNodes> = new TreeNode(null, {
codeNode: code.element,
aplToNode: aplTo.element,
});
if (curMatches === MatchCurrentResult.NoMatch) {
return;
} else if (curMatches === MatchCurrentResult.MatchedWithWildcard) {
return pairedCurrent;
} else if (code.children.length !== aplTo.children.length) {
return;
}
// At this point current does match
// Perform a search on each of the children of both AplTo and Code.

for (let i = 0; i < aplTo.children.length; i++) {
let childSearch = this.singleExprMatcher(
code.children[i],
aplTo.children[i]
);

if (childSearch === undefined) {
// Failed to get a full match, so early return here
return;

@@ -110,6 +129,43 @@ export class Matcher {
return pairedCurrent;
}

private checkCodeNode(
codeNode: t.Node,
aplToNode: t.Node
): MatchCurrentResult {
// First verify the internal DSL variables

if (aplToNode.type === "Identifier") {
for (let wildcard of this.internals) {
if (WildcardEvalVisitor.visit(wildcard.expr, codeNode)) {
return MatchCurrentResult.MatchedWithWildcard;
}
}
}

if (codeNode.type != aplToNode.type) {
return MatchCurrentResult.NoMatch;
}

//If not an internal DSL variable, gotta verify that the identifier is the same
if (codeNode.type === "Identifier" && aplToNode.type === "Identifier") {
if (codeNode.name != aplToNode.name) {
return MatchCurrentResult.NoMatch;
}
}
for (let key of Object.keys(aplToNode)) {
if (key in keys_to_ignore) {
continue;
}

if (!Object.keys(codeNode).includes(key)) {
return MatchCurrentResult.NoMatch;
}
}

return MatchCurrentResult.Matched;
}

multiStatementMatcher(code: TreeNode<t.Node>, aplTo: TreeNode<t.Node>) {
if (
code.element.type === "Program" ||

@@ -127,17 +183,18 @@ export class Matcher {
// Sliding window the size of aplTo
for (let y = 0; y <= code.length - aplTo.length; y++) {
let fullMatch = true;
let collection: TreeNode<PairedNodes>[] = [];
let statements: TreeNode<PairedNodes>[] = [];
for (let i = 0; i < aplTo.length; i++) {
let res = this.exactExprMatcher(code[i + y], aplTo[i]);
if (!res) {
fullMatch = false;
break;
}
collection.push(res);
statements.push(res);
}
if (fullMatch) {
this.matches.push(...collection);
console.log(statements.length);
this.matches.push({ statements });
}
}
}

@@ -174,42 +231,4 @@ export class Matcher {

return paired;
}

private checkCodeNode(code_node: t.Node, aplTo: t.Node): boolean {
// First verify the internal DSL variables

if (aplTo.type === "Identifier") {
if (aplTo.name in this.internals) {
if (this.internals[aplTo.name].includes(code_node.type)) {
return true;
}

if (this.internals[aplTo.name].includes("Expression")) {
return t.isExpression(code_node);
}
}
}

if (code_node.type != aplTo.type) {
return false;
}

//If not an internal DSL variable, gotta verify that the identifier is the same
if (code_node.type === "Identifier" && aplTo.type === "Identifier") {
if (code_node.name != aplTo.name) {
return false;
}
}
for (let key of Object.keys(aplTo)) {
if (key in keys_to_ignore) {
continue;
}

if (!Object.keys(code_node).includes(key)) {
return false;
}
}

return true;
}
}
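A rough sketch of how the new runMatch signature fits together with the parser (illustrative only, not part of the commit; import paths and the exact behaviour of makeTree/parse_with_plugins are assumptions based on how transform.ts uses them):

import { makeTree } from "../data_structures/tree";
import { parseInternalAplTo, parse_with_plugins } from "../parser/parse";
import { runMatch } from "./matcher";

// The wildcard types now carry boolean expressions such as "Expression || Identifier".
const { cleanedJS, prelude } = parseInternalAplTo(
    "let <<id: Identifier>> = <<val: Expression || Identifier>>;"
);
const codeTree = makeTree(parse_with_plugins("let x = 1 + 1;"));
const aplToTree = makeTree(parse_with_plugins(cleanedJS));

if (codeTree && aplToTree) {
    // Each Match now groups the paired statements it spans under `statements`.
    const matches = runMatch(codeTree, aplToTree, prelude);
    console.log(matches.length, matches[0]?.statements.length);
}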
src/matcher/wildcardEvaluator.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
import * as t from "@babel/types";
import {
    BinaryExpr,
    GroupExpr,
    Identifier,
    TypeExpr,
    UnaryExpr,
    Wildcard,
    WildcardNode,
} from "../parser/parse";

export class WildcardEvalVisitor {
    static visit(node: WildcardNode, toComp: t.Node): boolean {
        switch (node.nodeType) {
            case "BinaryExpr": {
                let cur = node as BinaryExpr;
                let left = this.visit(cur.left, toComp);
                let right = this.visit(cur.right, toComp);
                if (cur.op === "&&") {
                    return left && right;
                } else {
                    return left || right;
                }
            }
            case "UnaryExpr": {
                let cur = node as UnaryExpr;
                return !this.visit(cur.expr, toComp);
            }
            case "GroupExpr": {
                let cur = node as GroupExpr;
                return this.visit(cur.expr, toComp);
            }
            case "Identifier": {
                let cur = node as Identifier;
                if (cur.name === "Expression") {
                    return t.isExpression(toComp);
                }
                return cur.name === toComp.type;
            }
        }
    }
}
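A minimal usage sketch of the evaluator (illustrative, not part of the commit; it assumes the exported parseInternalAplTo entry point from ../parser/parse and @babel/parser's parseExpression):

import * as babelparser from "@babel/parser";
import { parseInternalAplTo } from "../parser/parse";
import { WildcardEvalVisitor } from "./wildcardEvaluator";

// Extract the wildcard "x: Identifier || MemberExpression" from a template.
const { prelude } = parseInternalAplTo(
    "<<x: Identifier || MemberExpression>>();"
);
const wildcard = prelude[0];

// Evaluate the boolean type expression against a concrete Babel node.
const node = babelparser.parseExpression("foo.bar"); // a MemberExpression
console.log(WildcardEvalVisitor.visit(wildcard.expr, node)); // true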
@@ -1,22 +1,17 @@
import * as babelparser from "@babel/parser";

import * as t from "@babel/types";

export interface InternalDSLVariable {
[internals: string]: string[];
}
import { WildcardToken, WildcardTokenizer } from "./wildcardTokenizer";

export interface InternalParseResult {
prelude: InternalDSLVariable;
prelude: Wildcard[];
cleanedJS: string;
}

export function parseInternal(code: string): InternalParseResult {
export function parseInternalTraTo(code: string): string {
let cleanedJS = "";
let temp = "";
let flag = false;
let prelude: InternalDSLVariable = {};

for (let i = 0; i < code.length; i++) {
if (code[i] === "<" && code[i + 1] === "<") {
// From now in we are inside of the DSL custom block

@@ -28,11 +23,46 @@ export function parseInternal(code: string): InternalParseResult {
if (flag && code[i] === ">" && code[i + 1] === ">") {
// We encountered a closing tag
flag = false;
let { identifier, types } = parseInternalString(temp);

cleanedJS += identifier;
cleanedJS += temp;

prelude[identifier] = types;
i += 1;
temp = "";
continue;
}

if (flag) {
temp += code[i];
} else {
cleanedJS += code[i];
}
}
return cleanedJS;
}

export function parseInternalAplTo(code: string): InternalParseResult {
let cleanedJS = "";
let temp = "";
let flag = false;
let prelude: Wildcard[] = [];
for (let i = 0; i < code.length; i++) {
if (code[i] === "<" && code[i + 1] === "<") {
// From now in we are inside of the DSL custom block
flag = true;
i += 1;
continue;
}

if (flag && code[i] === ">" && code[i + 1] === ">") {
// We encountered a closing tag
flag = false;
let wildcard = new WildcardParser(
new WildcardTokenizer(temp).tokenize()
).parse();

cleanedJS += wildcard.identifier.name;

prelude.push(wildcard);
i += 1;
temp = "";
continue;
@@ -47,23 +77,198 @@ export function parseInternal(code: string): InternalParseResult {
return { prelude, cleanedJS };
}

function parseInternalString(dslString: string): {
identifier: string;
types: string[];
} {
let [identifier, typeString, ..._] = dslString
.replace(/\s/g, "")
.split(":");
export interface Identifier extends WildcardNode {
nodeType: "Identifier";
name: string;
}

if (_.length > 0) {
// This is an error, and it means we probably have encountered two bitshift operators
throw new Error("Probably encountered bitshift");
export interface Wildcard {
nodeType: "Wildcard";
identifier: Identifier;
expr: TypeExpr;
star: boolean;
}

export interface WildcardNode {
nodeType: "BinaryExpr" | "UnaryExpr" | "GroupExpr" | "Identifier";
}

export type TypeExpr = BinaryExpr | UnaryExpr | PrimitiveExpr;

export type BinaryOperator = "||" | "&&";

export type UnaryOperator = "!";

export interface BinaryExpr extends WildcardNode {
nodeType: "BinaryExpr";
left: UnaryExpr | BinaryExpr | PrimitiveExpr;
op: BinaryOperator;
right: UnaryExpr | BinaryExpr | PrimitiveExpr;
}
export interface UnaryExpr extends WildcardNode {
nodeType: "UnaryExpr";
op: UnaryOperator;
expr: PrimitiveExpr;
}

export type PrimitiveExpr = GroupExpr | Identifier;

export interface GroupExpr extends WildcardNode {
nodeType: "GroupExpr";
expr: TypeExpr;
}

class WildcardParser {
private position = -1;

constructor(private tokens: WildcardToken[]) {}
private getCurrentToken() {
// 1. Return the element of array `tokens` at the current position.
return this.tokens[this.position];
}

return {
identifier,
types: typeString ? typeString.split("|") : [""],
};
private advance(): void {
// 1. Increment the value of `currentPosition` by 1.
this.position += 1;
}

private peek() {
// 1. Return the element of array `tokens` at a position immediately after the current position.
return this.tokens[this.position + 1];
}

private error() {
return new Error(
"Parsing failed at position: " +
this.position +
". The erroneous input token is: " +
this.getCurrentToken().value
);
}

parse(): Wildcard {
return this.Wildcard();
}

private Wildcard(): Wildcard {
let identifier = this.Identifier();
this.Semicolon();
let multidenoted = this.TypeExpr();
let star = this.Star();
return {
nodeType: "Wildcard",
identifier,
expr: multidenoted,
star,
};
}

private Star(): boolean {
if (this.peek() && this.peek().tokenKind === "Star") {
this.advance();
return true;
} else {
return false;
}
}

private TypeExpr(): TypeExpr {
if (this.peek().tokenKind === "UnaryOperator") {
return this.UnaryExpr();
} else {
return this.BinaryExpr();
}
}

private BinaryExpr(): BinaryExpr | UnaryExpr | PrimitiveExpr {
let left: UnaryExpr | BinaryExpr | PrimitiveExpr = this.UnaryExpr();
while (this.peek() && this.peek().tokenKind === "BinaryOperator") {
let op = this.BinaryOperator();
let right = this.UnaryExpr();
left = {
nodeType: "BinaryExpr",
left,
op,
right,
};
}

return left;
}

private BinaryOperator(): BinaryOperator {
if (this.peek().tokenKind === "BinaryOperator") {
this.advance();
return this.getCurrentToken().value as BinaryOperator;
} else throw this.error();
}

private UnaryExpr(): UnaryExpr | PrimitiveExpr {
if (this.peek().tokenKind === "UnaryOperator") {
let UnaryOperator = this.UnaryOperator();
let expr = this.PrimitiveExpr();
return {
nodeType: "UnaryExpr",
op: UnaryOperator,
expr,
};
} else {
return this.PrimitiveExpr();
}
}

private UnaryOperator(): UnaryOperator {
if (this.peek().tokenKind === "UnaryOperator") {
this.advance();
return this.getCurrentToken().value as UnaryOperator;
} else throw this.error();
}

private PrimitiveExpr(): PrimitiveExpr {
if (this.peek().tokenKind === "OpeningParenthesis") {
return this.GroupExpr();
} else {
return this.Identifier();
}
}

private GroupExpr(): GroupExpr {
this.OpeningParenthesis();
let expr = this.TypeExpr();
this.ClosingParenthesis();
return {
nodeType: "GroupExpr",
expr,
};
}

private OpeningParenthesis() {
if (this.peek().tokenKind === "OpeningParenthesis") {
this.advance();
} else throw this.error();
}
private ClosingParenthesis() {
if (this.peek().tokenKind === "ClosingParenthesis") {
this.advance();
} else throw this.error();
}

private Semicolon() {
if (this.peek().tokenKind === "Semicolon") {
this.advance();
} else {
throw this.error();
}
}
private Identifier(): Identifier {
if (this.peek().tokenKind === "Identifier") {
this.advance();
return {
nodeType: "Identifier",
name: this.getCurrentToken().value,
};
} else throw this.error();
}
}

export function parse_with_plugins(
@@ -73,3 +278,21 @@ export function parse_with_plugins(
plugins: [["pipelineOperator", { proposal: "hack", topicToken: "%" }]],
});
}

function testParser() {
console.dir(
parseInternalAplTo(
"<<someFunctionIdent:Identifier || MemberExpression>>(<<someFunctionParam: Expression || Identifier>>);"
),
{ depth: null }
);

console.dir(
parseInternalAplTo("<<SomeIdent: Statement && !ReturnStatement >>"),
{
depth: null,
}
);
}

//testParser();
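For reference, the second testParser call should produce a prelude entry along these lines (hand-derived from the parser above, not captured program output):

// parseInternalAplTo("<<SomeIdent: Statement && !ReturnStatement >>").prelude[0]
const expected: Wildcard = {
    nodeType: "Wildcard",
    identifier: { nodeType: "Identifier", name: "SomeIdent" },
    expr: {
        nodeType: "BinaryExpr",
        left: { nodeType: "Identifier", name: "Statement" },
        op: "&&",
        right: {
            nodeType: "UnaryExpr",
            op: "!",
            expr: { nodeType: "Identifier", name: "ReturnStatement" },
        },
    },
    star: false,
};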
src/parser/wildcardTokenizer.ts (new file, 132 lines)
@@ -0,0 +1,132 @@
type TokenKind =
    | "BinaryOperator"
    | "UnaryOperator"
    | "Identifier"
    | "OpeningParenthesis"
    | "ClosingParenthesis"
    | "Star"
    | "Semicolon";

export interface WildcardToken {
    tokenKind: TokenKind;
    value: string;
}

export class WildcardTokenizer {
    private tokens: WildcardToken[] = [];
    private current = -1; // Have to start at -1 because first iteration advances
    private source: string[];
    constructor(source: string) {
        this.source = source.split("");
    }

    tokenize(): WildcardToken[] {
        while (this.current < this.source.length - 1) {
            this.scanToken();
        }
        return this.tokens;
    }

    private peek(): string | undefined {
        return this.source[this.current + 1];
    }
    private getCurrent() {
        return this.source[this.current];
    }
    private advance() {
        this.current += 1;
    }

    private consumeToken(tokenKind: TokenKind, value: string) {
        this.tokens.push({ tokenKind, value });
    }

    private scanToken() {
        this.advance();
        let char = this.getCurrent();
        switch (char) {
            case "(": {
                this.consumeToken("OpeningParenthesis", char);
                break;
            }
            case ")": {
                this.consumeToken("ClosingParenthesis", char);
                break;
            }
            case "|": {
                if (this.peek() === "|") {
                    this.advance();
                    this.consumeToken("BinaryOperator", "||");
                } else {
                    throw new Error(
                        "Invalid token given to tokenizer: " + char
                    );
                }
                break;
            }
            case "!": {
                this.consumeToken("UnaryOperator", char);
                break;
            }
            case "&": {
                if (this.peek() === "&") {
                    this.advance();
                    this.consumeToken("BinaryOperator", "&&");
                } else {
                    throw new Error(
                        "Invalid token given to tokenizer: " + char
                    );
                }
                break;
            }
            case "*": {
                this.consumeToken("Star", char);
                break;
            }
            case ":": {
                this.consumeToken("Semicolon", char);
                break;
            }
            case " ":
                break;
            default:
                if (this.isAlpha(char)) {
                    this.consumeAlpha();
                    break;
                } else {
                    throw new Error("Invalid token given: " + char);
                }
        }
    }
    private consumeAlpha() {
        let word = "";

        while (true) {
            word += this.getCurrent();

            let next = this.peek();
            if (next && this.isAlpha(next)) {
                this.advance();
            } else {
                break;
            }
        }

        this.consumeToken("Identifier", word);
    }
    private isAlpha(val: string): boolean {
        let alphabet = new Set(
            "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_".split("")
        );
        return alphabet.has(val);
    }
}

function testWildcardTokenizer() {
    let tokenized = new WildcardTokenizer(
        "aiaiai: ((LOL||!Smack)&&SomethingElse)*"
    ).tokenize();

    console.log(tokenized);
}
//testWildcardTokenizer();
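Read against the tokenizer above, the test string should yield roughly these tokens (hand-derived, not captured output):

// new WildcardTokenizer("aiaiai: ((LOL||!Smack)&&SomethingElse)*").tokenize()
[
    { tokenKind: "Identifier", value: "aiaiai" },
    { tokenKind: "Semicolon", value: ":" },
    { tokenKind: "OpeningParenthesis", value: "(" },
    { tokenKind: "OpeningParenthesis", value: "(" },
    { tokenKind: "Identifier", value: "LOL" },
    { tokenKind: "BinaryOperator", value: "||" },
    { tokenKind: "UnaryOperator", value: "!" },
    { tokenKind: "Identifier", value: "Smack" },
    { tokenKind: "ClosingParenthesis", value: ")" },
    { tokenKind: "BinaryOperator", value: "&&" },
    { tokenKind: "Identifier", value: "SomethingElse" },
    { tokenKind: "ClosingParenthesis", value: ")" },
    { tokenKind: "Star", value: "*" },
];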
@@ -4,6 +4,8 @@ import generate from "@babel/generator";
import {
InternalDSLVariable,
parseInternal,
parseInternalAplTo,
parseInternalTraTo,
parse_with_plugins,
} from "../parser/parse";
import {

@@ -42,12 +44,10 @@ export function transform(recipe: TransformRecipe, code: string): string {
// We are using JSTQL
// We have to parse JSTQL to the self hosted version

let { cleanedJS: applicableTo, prelude } = parseInternal(
let { cleanedJS: applicableTo, prelude } = parseInternalAplTo(
recipe.applicableTo
);
let { cleanedJS: transformTo, prelude: _ } = parseInternal(
recipe.transformTo
);
let transformTo = parseInternalTraTo(recipe.transformTo);

return transformSelfHosted(
{ applicableTo, transformTo },
@@ -70,6 +70,7 @@ function transformSelfHosted(
let applicableToTree = makeTree(applicabelToAST);
let transformTo = parse_with_plugins(recipe.transformTo);
let transformToTree = makeTree(transformTo);

if (
codeTree == undefined ||
applicableToTree == undefined ||

@@ -78,23 +79,14 @@
throw new Error("This no worky LOL");
}
showTree(applicableToTree);
console.log(generate(codeAST));

let matches = runMatch(codeTree, applicableToTree, internals);

console.log(matches.length);
for (let match of matches.reverse()) {
//console.log(transformToTree.element);
// There is a bug here, for some reason it works sometimes when Program and sometimes when File, no clue why?????
let output = parse_with_plugins(recipe.transformTo).program;
try {
transformer(match, transformToTree, output, codeAST);
} catch (error) {
console.log(error);
}
}
console.log("Final generated code: \n");

let output = generate(codeAST, { topicToken: "%" }).code;
let outputAST = transformer(matches, transformToTree, codeAST, transformTo);

let output = generate(outputAST, { topicToken: "%" }).code;
//showTree(transformToTree);
return output;
}
@@ -8,30 +8,56 @@ import {
showTree,
showTreePaired,
} from "../data_structures/tree";
import { InternalDSLVariable } from "../parser/parse";
import { MatchedTreeNode, PairedNodes } from "../matcher/matcher";
import { Match, MatchedTreeNode, PairedNodes } from "../matcher/matcher";
import traverse from "@babel/traverse";
import generate from "@babel/generator";
import { TransformRecipe } from "./transform";

export function transformer(
match: TreeNode<PairedNodes>,
trnTo: TreeNode<t.Node>,
output: t.Node,
inputCode: t.Node
) {
transformMatch(match, trnTo, output);
matches: Match[],
transformTo: TreeNode<t.Node>,
codeAST: t.Node,
traToAST: t.File
): t.Node {
for (let match of matches.reverse()) {
try {
let traToWithWildcards = structuredClone(traToAST);
for (let match_stmt of match.statements) {
transformMatch(match_stmt, transformTo, traToWithWildcards);
}
traverse(codeAST, {
enter(path) {
if (
!(
path.node.type === "Program" ||
path.node.type === "File"
)
) {
if (
path.node === match.statements[0].element.codeNode
) {
path.replaceWithMultiple(
traToWithWildcards.program.body
);
let siblings = path.getAllNextSiblings();

if (output.type == "Program") {
output = output.body[0];
for (
let i = 0;
i < match.statements.length - 1;
i++
) {
siblings[i].remove();
}
}
}
},
});
} catch (e) {
console.log(e);
}
}

traverse(inputCode, {
enter(path) {
if (path.node === match.element.codeNode) {
path.replaceWith(output);
}
},
});
return codeAST;
}

export function transformMatch(
@@ -39,23 +65,18 @@
trnTo: TreeNode<t.Node>,
output: t.Node
) {
if (trnTo.element.type == "Program") {
return transformMatch(match, trnTo.children[0], output);
}

let isMatch = matchNode(match.element.aplToNode, trnTo.element);
if (isMatch) {
if (trnTo.element.type == "Identifier") {
traverse(output, {
enter(path) {
if (path.isIdentifier({ name: trnTo.element.name })) {
if (match.element.codeNode) {
path.replaceWith(match.element.codeNode);
}
}
},
});
}
let isMatchingIdentifier = matchNode(
match.element.aplToNode,
trnTo.element
);
if (isMatchingIdentifier) {
traverse(output, {
Identifier: (path) => {
if (path.node.name === (<t.Identifier>trnTo.element).name) {
path.replaceWithMultiple(match.element.codeNode);
}
},
});
} else {
for (let match_child of match.children) {
transformMatch(match_child, trnTo, output);
@@ -70,18 +91,7 @@ function matchNode(aplTo: t.Node, trnTo: t.Node): boolean {
//console.log(trnTo);

if (trnTo.type == "Identifier" && aplTo.type == "Identifier") {
let aplToName = washName(aplTo.name);
let trnToName = trnTo.name;
if (aplToName == trnToName) {
return true;
}
} else if (trnTo.type == "Identifier" && aplTo.type == "Identifier") {
let aplToName = washName(aplTo.name);
let trnToName = trnTo.name;

if (aplToName == trnToName) {
return true;
}
return aplTo.name === trnTo.name;
}
return false;
}
test_files/do_test.js (new file, 0 lines)
@@ -1,2 +1,7 @@
let ThisTest = LOOOOOOOOL();
let HAHHAHAH = 1 + 1;
let a = LOOOOOOOOL();
let b = (999 * 128) / 12;

const haha = () => {
let a = LOOOOOOOOL();
let b = (999 * 128) / 12;
};
@@ -1 +1,8 @@
let something = 1 + 1;
let yikers = hahahah;

let lol = () => 100 + 100;

function haha() {
let fhdsjkfhdsjkfhds = fjhdkslfjhdsklfjdskl;
}