Compare commits
28 commits
Author | SHA1 | Date | |
---|---|---|---|
cd45773354 | |||
dacc38ad24 | |||
6fef7f1b64 | |||
91387a397a | |||
fa090949ef | |||
dc04b92ef2 | |||
e5978c7630 | |||
7028df3d37 | |||
d468787983 | |||
1aa8956fb2 | |||
bf3abd232c | |||
3d3865d625 | |||
438d4589d4 | |||
b740b469d4 | |||
03d906da5a | |||
b6a7c645f2 | |||
8cb01d1bd0 | |||
e906095363 | |||
2e268835e5 | |||
0334431f32 | |||
52c3c8e24e | |||
96673b993c | |||
60bc21d8ab | |||
2ef239b466 | |||
38fe05af7f | |||
4ae79e6d92 | |||
975030d7ce | |||
4b03caa360 |
76 changed files with 6453 additions and 267 deletions
3
.gitmodules
vendored
Normal file
3
.gitmodules
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
[submodule "babel"]
|
||||
path = babel
|
||||
url = git@github.com:polsevev/babel.git
|
53
.vscode/launch.json
vendored
Normal file
53
.vscode/launch.json
vendored
Normal file
|
@ -0,0 +1,53 @@
|
|||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
|
||||
{
|
||||
"type": "bun",
|
||||
"request": "launch",
|
||||
"name": "Debug Bun",
|
||||
|
||||
// The path to a JavaScript or TypeScript file to run.
|
||||
"program": "src/index.js",
|
||||
|
||||
// The arguments to pass to the program, if any.
|
||||
"args": [],
|
||||
|
||||
// The working directory of the program.
|
||||
"cwd": "${workspaceFolder}",
|
||||
|
||||
// The environment variables to pass to the program.
|
||||
"env": {},
|
||||
|
||||
// If the environment variables should not be inherited from the parent process.
|
||||
"strictEnv": false,
|
||||
|
||||
// If the program should be run in watch mode.
|
||||
// This is equivalent to passing `--watch` to the `bun` executable.
|
||||
// You can also set this to "hot" to enable hot reloading using `--hot`.
|
||||
"watchMode": false,
|
||||
|
||||
// If the debugger should stop on the first line of the program.
|
||||
"stopOnEntry": false,
|
||||
|
||||
// If the debugger should be disabled. (for example, breakpoints will not be hit)
|
||||
"noDebug": false,
|
||||
|
||||
// The path to the `bun` executable, defaults to your `PATH` environment variable.
|
||||
"runtime": "bun",
|
||||
|
||||
// The arguments to pass to the `bun` executable, if any.
|
||||
// Unlike `args`, these are passed to the executable itself, not the program.
|
||||
"runtimeArgs": [],
|
||||
},
|
||||
{
|
||||
"type": "bun",
|
||||
"request": "attach",
|
||||
"name": "Attach to Bun",
|
||||
|
||||
// The URL of the WebSocket inspector to attach to.
|
||||
// This value can be retrieved by using `bun --inspect`.
|
||||
"url": "ws://localhost:6499/",
|
||||
}
|
||||
]
|
||||
}
|
11
.vscode/settings.json
vendored
Normal file
11
.vscode/settings.json
vendored
Normal file
|
@ -0,0 +1,11 @@
|
|||
{
|
||||
// The path to the `bun` executable.
|
||||
"bun.runtime": "/home/rolfmg/.bun/bin/bun",
|
||||
|
||||
// If support for Bun should be added to the default "JavaScript Debug Terminal".
|
||||
"bun.debugTerminal.enabled": true,
|
||||
|
||||
// If the debugger should stop on the first line of the program.
|
||||
"bun.debugTerminal.stopOnEntry": true,
|
||||
"cSpell.words": ["babelparser"]
|
||||
}
|
13
JSTQL/.eslintrc.json
Normal file
13
JSTQL/.eslintrc.json
Normal file
|
@ -0,0 +1,13 @@
|
|||
{
|
||||
"root": true,
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 6,
|
||||
"sourceType": "module"
|
||||
},
|
||||
"plugins": [
|
||||
"@typescript-eslint"
|
||||
],
|
||||
"rules": {
|
||||
}
|
||||
}
|
11
JSTQL/.gitignore
vendored
Normal file
11
JSTQL/.gitignore
vendored
Normal file
|
@ -0,0 +1,11 @@
|
|||
.vscode/*
|
||||
!.vscode/extensions.json
|
||||
!.vscode/launch.json
|
||||
!.vscode/tasks.json
|
||||
node_modules/
|
||||
out/
|
||||
src/language/generated/
|
||||
static/bundle/
|
||||
static/monaco-editor-workers/
|
||||
static/worker/
|
||||
syntaxes/
|
9
JSTQL/.vscode/extensions.json
vendored
Normal file
9
JSTQL/.vscode/extensions.json
vendored
Normal file
|
@ -0,0 +1,9 @@
|
|||
{
|
||||
// See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
|
||||
// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
|
||||
|
||||
// List of extensions which should be recommended for users of this workspace.
|
||||
"recommendations": [
|
||||
"langium.langium-vscode"
|
||||
]
|
||||
}
|
35
JSTQL/.vscode/launch.json
vendored
Normal file
35
JSTQL/.vscode/launch.json
vendored
Normal file
|
@ -0,0 +1,35 @@
|
|||
// A launch configuration that launches the extension inside a new window
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Run Extension",
|
||||
"type": "extensionHost",
|
||||
"request": "launch",
|
||||
"args": [
|
||||
"--extensionDevelopmentPath=${workspaceFolder}"
|
||||
],
|
||||
"sourceMaps": true,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Attach to Language Server",
|
||||
"type": "node",
|
||||
"port": 6009,
|
||||
"request": "attach",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"sourceMaps": true,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js",
|
||||
"${workspaceFolder}/node_modules/langium"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
21
JSTQL/.vscode/tasks.json
vendored
Normal file
21
JSTQL/.vscode/tasks.json
vendored
Normal file
|
@ -0,0 +1,21 @@
|
|||
{
|
||||
// See https://go.microsoft.com/fwlink/?LinkId=733558
|
||||
// for the documentation about the tasks.json format
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "Build jstql",
|
||||
"command": "npm run langium:generate && npm run build",
|
||||
"type": "shell",
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
},
|
||||
"detail": "Langium: Generate grammar and build the jstql language",
|
||||
"icon": {
|
||||
"color": "terminal.ansiGreen",
|
||||
"id": "server-process"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
4
JSTQL/.vscodeignore
Normal file
4
JSTQL/.vscodeignore
Normal file
|
@ -0,0 +1,4 @@
|
|||
.vscode/**
|
||||
.vscode-test/**
|
||||
.gitignore
|
||||
langium-quickstart.md
|
BIN
JSTQL/JSTQL-0.0.1.vsix
Normal file
BIN
JSTQL/JSTQL-0.0.1.vsix
Normal file
Binary file not shown.
4
JSTQL/bin/cli.js
Normal file
4
JSTQL/bin/cli.js
Normal file
|
@ -0,0 +1,4 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
import main from '../out/cli/main.js';
|
||||
main();
|
54
JSTQL/esbuild.mjs
Normal file
54
JSTQL/esbuild.mjs
Normal file
|
@ -0,0 +1,54 @@
|
|||
//@ts-check
|
||||
import * as esbuild from 'esbuild';
|
||||
|
||||
const watch = process.argv.includes('--watch');
|
||||
const minify = process.argv.includes('--minify');
|
||||
|
||||
const success = watch ? 'Watch build succeeded' : 'Build succeeded';
|
||||
|
||||
function getTime() {
|
||||
const date = new Date();
|
||||
return `[${`${padZeroes(date.getHours())}:${padZeroes(date.getMinutes())}:${padZeroes(date.getSeconds())}`}] `;
|
||||
}
|
||||
|
||||
function padZeroes(i) {
|
||||
return i.toString().padStart(2, '0');
|
||||
}
|
||||
|
||||
const plugins = [{
|
||||
name: 'watch-plugin',
|
||||
setup(build) {
|
||||
build.onEnd(result => {
|
||||
if (result.errors.length === 0) {
|
||||
console.log(getTime() + success);
|
||||
}
|
||||
});
|
||||
},
|
||||
}];
|
||||
|
||||
const ctx = await esbuild.context({
|
||||
// Entry points for the vscode extension and the language server
|
||||
entryPoints: ['src/extension/main.ts', 'src/language/main.ts'],
|
||||
outdir: 'out',
|
||||
bundle: true,
|
||||
target: "ES2017",
|
||||
// VSCode's extension host is still using cjs, so we need to transform the code
|
||||
format: 'cjs',
|
||||
// To prevent confusing node, we explicitly use the `.cjs` extension
|
||||
outExtension: {
|
||||
'.js': '.cjs'
|
||||
},
|
||||
loader: { '.ts': 'ts' },
|
||||
external: ['vscode'],
|
||||
platform: 'node',
|
||||
sourcemap: !minify,
|
||||
minify,
|
||||
plugins
|
||||
});
|
||||
|
||||
if (watch) {
|
||||
await ctx.watch();
|
||||
} else {
|
||||
await ctx.rebuild();
|
||||
ctx.dispose();
|
||||
}
|
12
JSTQL/langium-config.json
Normal file
12
JSTQL/langium-config.json
Normal file
|
@ -0,0 +1,12 @@
|
|||
{
|
||||
"projectName": "Jstql",
|
||||
"languages": [{
|
||||
"id": "jstql",
|
||||
"grammar": "src/language/jstql.langium",
|
||||
"fileExtensions": [".jstql"],
|
||||
"textMate": {
|
||||
"out": "syntaxes/jstql.tmLanguage.json"
|
||||
}
|
||||
}],
|
||||
"out": "src/language/generated"
|
||||
}
|
40
JSTQL/langium-quickstart.md
Normal file
40
JSTQL/langium-quickstart.md
Normal file
|
@ -0,0 +1,40 @@
|
|||
# Welcome to your Langium VS Code Extension
|
||||
|
||||
## What's in the folder
|
||||
|
||||
This folder contains all necessary files for your language extension.
|
||||
* `package.json` - the manifest file in which you declare your language support.
|
||||
* `language-configuration.json` - the language configuration used in the VS Code editor, defining the tokens that are used for comments and brackets.
|
||||
* `src/extension/main.ts` - the main code of the extension, which is responsible for launching a language server and client.
|
||||
* `src/language/jstql.langium` - the grammar definition of your language.
|
||||
* `src/language/main.ts` - the entry point of the language server process.
|
||||
* `src/language/jstql-module.ts` - the dependency injection module of your language implementation. Use this to register overridden and added services.
|
||||
* `src/language/jstql-validator.ts` - an example validator. You should change it to reflect the semantics of your language.
|
||||
* `src/cli/main.ts` - the entry point of the command line interface (CLI) of your language.
|
||||
* `src/cli/generator.ts` - the code generator used by the CLI to write output files from DSL documents.
|
||||
* `src/cli/cli-util.ts` - utility code for the CLI.
|
||||
|
||||
## Get up and running straight away
|
||||
|
||||
* Run `npm run langium:generate` to generate TypeScript code from the grammar definition.
|
||||
* Run `npm run build` to compile all TypeScript code.
|
||||
* Press `F5` to open a new window with your extension loaded.
|
||||
* Create a new file with a file name suffix matching your language.
|
||||
* Verify that syntax highlighting, validation, completion etc. are working as expected.
|
||||
* Run `node ./bin/cli` to see options for the CLI; `node ./bin/cli generate <file>` generates code for a given DSL file.
|
||||
|
||||
## Make changes
|
||||
|
||||
* Run `npm run watch` to have the TypeScript compiler run automatically after every change of the source files.
|
||||
* Run `npm run langium:watch` to have the Langium generator run automatically after every change of the grammar declaration.
|
||||
* You can relaunch the extension from the debug toolbar after making changes to the files listed above.
|
||||
* You can also reload (`Ctrl+R` or `Cmd+R` on Mac) the VS Code window with your extension to load your changes.
|
||||
|
||||
## Install your extension
|
||||
|
||||
* To start using your extension with VS Code, copy it into the `<user home>/.vscode/extensions` folder and restart Code.
|
||||
* To share your extension with the world, read the [VS Code documentation](https://code.visualstudio.com/api/working-with-extensions/publishing-extension) about publishing an extension.
|
||||
|
||||
## To Go Further
|
||||
|
||||
Documentation about the Langium framework is available at https://langium.org
|
30
JSTQL/language-configuration.json
Normal file
30
JSTQL/language-configuration.json
Normal file
|
@ -0,0 +1,30 @@
|
|||
{
|
||||
"comments": {
|
||||
// symbol used for single line comment. Remove this entry if your language does not support line comments
|
||||
"lineComment": "//",
|
||||
// symbols used for start and end a block comment. Remove this entry if your language does not support block comments
|
||||
"blockComment": [ "/*", "*/" ]
|
||||
},
|
||||
// symbols used as brackets
|
||||
"brackets": [
|
||||
["{", "}"],
|
||||
["[", "]"],
|
||||
["(", ")"]
|
||||
],
|
||||
// symbols that are auto closed when typing
|
||||
"autoClosingPairs": [
|
||||
["{", "}"],
|
||||
["[", "]"],
|
||||
["(", ")"],
|
||||
["\"", "\""],
|
||||
["'", "'"]
|
||||
],
|
||||
// symbols that can be used to surround a selection
|
||||
"surroundingPairs": [
|
||||
["{", "}"],
|
||||
["[", "]"],
|
||||
["(", ")"],
|
||||
["\"", "\""],
|
||||
["'", "'"]
|
||||
]
|
||||
}
|
2500
JSTQL/package-lock.json
generated
Normal file
2500
JSTQL/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load diff
70
JSTQL/package.json
Normal file
70
JSTQL/package.json
Normal file
|
@ -0,0 +1,70 @@
|
|||
{
|
||||
"name": "JSTQL",
|
||||
"description": "Please enter a brief description here",
|
||||
"version": "0.0.1",
|
||||
"files": [
|
||||
"bin"
|
||||
],
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "tsc -b tsconfig.json && node esbuild.mjs",
|
||||
"watch": "concurrently -n tsc,esbuild -c blue,yellow \"tsc -b tsconfig.json --watch\" \"node esbuild.mjs --watch\"",
|
||||
"lint": "eslint src --ext ts",
|
||||
"langium:generate": "langium generate",
|
||||
"langium:watch": "langium generate --watch",
|
||||
"vscode:prepublish": "npm run build && npm run lint"
|
||||
},
|
||||
"dependencies": {
|
||||
"langium": "~2.1.0",
|
||||
"vscode-languageclient": "~9.0.1",
|
||||
"vscode-languageserver": "~9.0.1",
|
||||
"chalk": "~5.3.0",
|
||||
"commander": "~11.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "~16.18.41",
|
||||
"@typescript-eslint/parser": "~6.4.1",
|
||||
"@typescript-eslint/eslint-plugin": "~6.4.1",
|
||||
"eslint": "~8.47.0",
|
||||
"langium-cli": "~2.1.0",
|
||||
"typescript": "~5.1.6",
|
||||
"@types/vscode": "~1.67.0",
|
||||
"concurrently": "~8.2.1",
|
||||
"esbuild": "~0.19.2"
|
||||
},
|
||||
"displayName": "JSTQL",
|
||||
"engines": {
|
||||
"vscode": "^1.67.0",
|
||||
"node": ">=16.0.0"
|
||||
},
|
||||
"categories": [
|
||||
"Programming Languages"
|
||||
],
|
||||
"contributes": {
|
||||
"languages": [
|
||||
{
|
||||
"id": "jstql",
|
||||
"aliases": [
|
||||
"JSTQL",
|
||||
"jstql"
|
||||
],
|
||||
"extensions": [".jstql"],
|
||||
"configuration": "./language-configuration.json"
|
||||
}
|
||||
],
|
||||
"grammars": [
|
||||
{
|
||||
"language": "jstql",
|
||||
"scopeName": "source.jstql",
|
||||
"path": "syntaxes/jstql.tmLanguage.json"
|
||||
}
|
||||
]
|
||||
},
|
||||
"activationEvents": [
|
||||
"onLanguage:jstql"
|
||||
],
|
||||
"main": "./out/extension/main.cjs",
|
||||
"bin": {
|
||||
"jstql-cli": "./bin/cli.js"
|
||||
}
|
||||
}
|
17
JSTQL/src/JSTQL_interface/api.ts
Normal file
17
JSTQL/src/JSTQL_interface/api.ts
Normal file
|
@ -0,0 +1,17 @@
|
|||
import type { Model } from "../language/generated/ast.js";
|
||||
|
||||
import { AstNode, EmptyFileSystem, LangiumDocument } from "langium";
|
||||
import { parseDocument } from "langium/test";
|
||||
import { createJstqlServices } from "../language/jstql-module.js";
|
||||
const services = createJstqlServices(EmptyFileSystem).Jstql;
|
||||
|
||||
export async function parseDSLtoAST(modelText: string): Promise<Model> {
|
||||
var doc: LangiumDocument<AstNode> = await parseDocument(
|
||||
services,
|
||||
modelText
|
||||
);
|
||||
const db = services.shared.workspace.DocumentBuilder;
|
||||
await db.build([doc], { validation: true });
|
||||
const model = doc.parseResult.value as Model;
|
||||
return model;
|
||||
}
|
51
JSTQL/src/cli/cli-util.ts
Normal file
51
JSTQL/src/cli/cli-util.ts
Normal file
|
@ -0,0 +1,51 @@
|
|||
import type { AstNode, LangiumDocument, LangiumServices } from 'langium';
|
||||
import chalk from 'chalk';
|
||||
import * as path from 'node:path';
|
||||
import * as fs from 'node:fs';
|
||||
import { URI } from 'langium';
|
||||
|
||||
export async function extractDocument(fileName: string, services: LangiumServices): Promise<LangiumDocument> {
|
||||
const extensions = services.LanguageMetaData.fileExtensions;
|
||||
if (!extensions.includes(path.extname(fileName))) {
|
||||
console.error(chalk.yellow(`Please choose a file with one of these extensions: ${extensions}.`));
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (!fs.existsSync(fileName)) {
|
||||
console.error(chalk.red(`File ${fileName} does not exist.`));
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const document = services.shared.workspace.LangiumDocuments.getOrCreateDocument(URI.file(path.resolve(fileName)));
|
||||
await services.shared.workspace.DocumentBuilder.build([document], { validation: true });
|
||||
|
||||
const validationErrors = (document.diagnostics ?? []).filter(e => e.severity === 1);
|
||||
if (validationErrors.length > 0) {
|
||||
console.error(chalk.red('There are validation errors:'));
|
||||
for (const validationError of validationErrors) {
|
||||
console.error(chalk.red(
|
||||
`line ${validationError.range.start.line + 1}: ${validationError.message} [${document.textDocument.getText(validationError.range)}]`
|
||||
));
|
||||
}
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
return document;
|
||||
}
|
||||
|
||||
export async function extractAstNode<T extends AstNode>(fileName: string, services: LangiumServices): Promise<T> {
|
||||
return (await extractDocument(fileName, services)).parseResult?.value as T;
|
||||
}
|
||||
|
||||
interface FilePathData {
|
||||
destination: string,
|
||||
name: string
|
||||
}
|
||||
|
||||
export function extractDestinationAndName(filePath: string, destination: string | undefined): FilePathData {
|
||||
filePath = path.basename(filePath, path.extname(filePath)).replace(/[.-]/g, '');
|
||||
return {
|
||||
destination: destination ?? path.join(path.dirname(filePath), 'generated'),
|
||||
name: path.basename(filePath)
|
||||
};
|
||||
}
|
23
JSTQL/src/cli/generator.ts
Normal file
23
JSTQL/src/cli/generator.ts
Normal file
|
@ -0,0 +1,23 @@
|
|||
import type { Model } from "../language/generated/ast.js";
|
||||
import * as fs from "node:fs";
|
||||
import { CompositeGeneratorNode, NL, toString } from "langium";
|
||||
import * as path from "node:path";
|
||||
import { extractDestinationAndName } from "./cli-util.js";
|
||||
|
||||
export function generateJavaScript(
|
||||
model: Model,
|
||||
filePath: string,
|
||||
destination: string | undefined
|
||||
): string {
|
||||
const data = extractDestinationAndName(filePath, destination);
|
||||
const generatedFilePath = `${path.join(data.destination, data.name)}.js`;
|
||||
|
||||
const fileNode = new CompositeGeneratorNode();
|
||||
fileNode.append('"use strict";', NL, NL);
|
||||
|
||||
if (!fs.existsSync(data.destination)) {
|
||||
fs.mkdirSync(data.destination, { recursive: true });
|
||||
}
|
||||
fs.writeFileSync(generatedFilePath, toString(fileNode));
|
||||
return generatedFilePath;
|
||||
}
|
42
JSTQL/src/cli/main.ts
Normal file
42
JSTQL/src/cli/main.ts
Normal file
|
@ -0,0 +1,42 @@
|
|||
import type { Model } from '../language/generated/ast.js';
|
||||
import chalk from 'chalk';
|
||||
import { Command } from 'commander';
|
||||
import { JstqlLanguageMetaData } from '../language/generated/module.js';
|
||||
import { createJstqlServices } from '../language/jstql-module.js';
|
||||
import { extractAstNode } from './cli-util.js';
|
||||
import { generateJavaScript } from './generator.js';
|
||||
import { NodeFileSystem } from 'langium/node';
|
||||
import * as url from 'node:url';
|
||||
import * as fs from 'node:fs/promises';
|
||||
import * as path from 'node:path';
|
||||
const __dirname = url.fileURLToPath(new URL('.', import.meta.url));
|
||||
|
||||
const packagePath = path.resolve(__dirname, '..', '..', 'package.json');
|
||||
const packageContent = await fs.readFile(packagePath, 'utf-8');
|
||||
|
||||
export const generateAction = async (fileName: string, opts: GenerateOptions): Promise<void> => {
|
||||
const services = createJstqlServices(NodeFileSystem).Jstql;
|
||||
const model = await extractAstNode<Model>(fileName, services);
|
||||
const generatedFilePath = generateJavaScript(model, fileName, opts.destination);
|
||||
console.log(chalk.green(`JavaScript code generated successfully: ${generatedFilePath}`));
|
||||
};
|
||||
|
||||
export type GenerateOptions = {
|
||||
destination?: string;
|
||||
}
|
||||
|
||||
export default function(): void {
|
||||
const program = new Command();
|
||||
|
||||
program.version(JSON.parse(packageContent).version);
|
||||
|
||||
const fileExtensions = JstqlLanguageMetaData.fileExtensions.join(', ');
|
||||
program
|
||||
.command('generate')
|
||||
.argument('<file>', `source file (possible file extensions: ${fileExtensions})`)
|
||||
.option('-d, --destination <dir>', 'destination directory of generating')
|
||||
.description('generates JavaScript code that prints "Hello, {name}!" for each greeting in a source file')
|
||||
.action(generateAction);
|
||||
|
||||
program.parse(process.argv);
|
||||
}
|
58
JSTQL/src/extension/main.ts
Normal file
58
JSTQL/src/extension/main.ts
Normal file
|
@ -0,0 +1,58 @@
|
|||
import type { LanguageClientOptions, ServerOptions} from 'vscode-languageclient/node.js';
|
||||
import * as vscode from 'vscode';
|
||||
import * as path from 'node:path';
|
||||
import { LanguageClient, TransportKind } from 'vscode-languageclient/node.js';
|
||||
|
||||
let client: LanguageClient;
|
||||
|
||||
// This function is called when the extension is activated.
|
||||
export function activate(context: vscode.ExtensionContext): void {
|
||||
client = startLanguageClient(context);
|
||||
}
|
||||
|
||||
// This function is called when the extension is deactivated.
|
||||
export function deactivate(): Thenable<void> | undefined {
|
||||
if (client) {
|
||||
return client.stop();
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function startLanguageClient(context: vscode.ExtensionContext): LanguageClient {
|
||||
const serverModule = context.asAbsolutePath(path.join('out', 'language', 'main.cjs'));
|
||||
// The debug options for the server
|
||||
// --inspect=6009: runs the server in Node's Inspector mode so VS Code can attach to the server for debugging.
|
||||
// By setting `process.env.DEBUG_BREAK` to a truthy value, the language server will wait until a debugger is attached.
|
||||
const debugOptions = { execArgv: ['--nolazy', `--inspect${process.env.DEBUG_BREAK ? '-brk' : ''}=${process.env.DEBUG_SOCKET || '6009'}`] };
|
||||
|
||||
// If the extension is launched in debug mode then the debug server options are used
|
||||
// Otherwise the run options are used
|
||||
const serverOptions: ServerOptions = {
|
||||
run: { module: serverModule, transport: TransportKind.ipc },
|
||||
debug: { module: serverModule, transport: TransportKind.ipc, options: debugOptions }
|
||||
};
|
||||
|
||||
const fileSystemWatcher = vscode.workspace.createFileSystemWatcher('**/*.jstql');
|
||||
context.subscriptions.push(fileSystemWatcher);
|
||||
|
||||
// Options to control the language client
|
||||
const clientOptions: LanguageClientOptions = {
|
||||
documentSelector: [{ scheme: 'file', language: 'jstql' }],
|
||||
synchronize: {
|
||||
// Notify the server about file changes to files contained in the workspace
|
||||
fileEvents: fileSystemWatcher
|
||||
}
|
||||
};
|
||||
|
||||
// Create the language client and start the client.
|
||||
const client = new LanguageClient(
|
||||
'jstql',
|
||||
'JSTQL',
|
||||
serverOptions,
|
||||
clientOptions
|
||||
);
|
||||
|
||||
// Start the client. This will also launch the server
|
||||
client.start();
|
||||
return client;
|
||||
}
|
63
JSTQL/src/language/jstql-module.ts
Normal file
63
JSTQL/src/language/jstql-module.ts
Normal file
|
@ -0,0 +1,63 @@
|
|||
import type { DefaultSharedModuleContext, LangiumServices, LangiumSharedServices, Module, PartialLangiumServices } from 'langium';
|
||||
import { createDefaultModule, createDefaultSharedModule, inject } from 'langium';
|
||||
import { JstqlGeneratedModule, JstqlGeneratedSharedModule } from './generated/module.js';
|
||||
import { JstqlValidator, registerValidationChecks } from './jstql-validator.js';
|
||||
|
||||
/**
|
||||
* Declaration of custom services - add your own service classes here.
|
||||
*/
|
||||
export type JstqlAddedServices = {
|
||||
validation: {
|
||||
JstqlValidator: JstqlValidator
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Union of Langium default services and your custom services - use this as constructor parameter
|
||||
* of custom service classes.
|
||||
*/
|
||||
export type JstqlServices = LangiumServices & JstqlAddedServices
|
||||
|
||||
/**
|
||||
* Dependency injection module that overrides Langium default services and contributes the
|
||||
* declared custom services. The Langium defaults can be partially specified to override only
|
||||
* selected services, while the custom services must be fully specified.
|
||||
*/
|
||||
export const JstqlModule: Module<JstqlServices, PartialLangiumServices & JstqlAddedServices> = {
|
||||
validation: {
|
||||
JstqlValidator: () => new JstqlValidator()
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Create the full set of services required by Langium.
|
||||
*
|
||||
* First inject the shared services by merging two modules:
|
||||
* - Langium default shared services
|
||||
* - Services generated by langium-cli
|
||||
*
|
||||
* Then inject the language-specific services by merging three modules:
|
||||
* - Langium default language-specific services
|
||||
* - Services generated by langium-cli
|
||||
* - Services specified in this file
|
||||
*
|
||||
* @param context Optional module context with the LSP connection
|
||||
* @returns An object wrapping the shared services and the language-specific services
|
||||
*/
|
||||
export function createJstqlServices(context: DefaultSharedModuleContext): {
|
||||
shared: LangiumSharedServices,
|
||||
Jstql: JstqlServices
|
||||
} {
|
||||
const shared = inject(
|
||||
createDefaultSharedModule(context),
|
||||
JstqlGeneratedSharedModule
|
||||
);
|
||||
const Jstql = inject(
|
||||
createDefaultModule({ shared }),
|
||||
JstqlGeneratedModule,
|
||||
JstqlModule
|
||||
);
|
||||
shared.ServiceRegistry.register(Jstql);
|
||||
registerValidationChecks(Jstql);
|
||||
return { shared, Jstql };
|
||||
}
|
105
JSTQL/src/language/jstql-validator.ts
Normal file
105
JSTQL/src/language/jstql-validator.ts
Normal file
|
@ -0,0 +1,105 @@
|
|||
import type { ValidationAcceptor, ValidationChecks } from "langium";
|
||||
import type { JstqlAstType, Case } from "./generated/ast.js";
|
||||
import type { JstqlServices } from "./jstql-module.js";
|
||||
|
||||
/**
|
||||
* Register custom validation checks.
|
||||
*/
|
||||
export function registerValidationChecks(services: JstqlServices) {
|
||||
const registry = services.validation.ValidationRegistry;
|
||||
const validator = services.validation.JstqlValidator;
|
||||
const checks: ValidationChecks<JstqlAstType> = {
|
||||
Case: validator.validateWildcards,
|
||||
};
|
||||
registry.register(checks, validator);
|
||||
}
|
||||
|
||||
/**
|
||||
* Implementation of custom validations.
|
||||
*/
|
||||
export class JstqlValidator {
|
||||
validateWildcards(pair: Case, accept: ValidationAcceptor): void {
|
||||
try {
|
||||
let validationResultAplTo = validateWildcardAplTo(
|
||||
collectWildcard(pair.aplTo.apl_to_code.split(""))
|
||||
);
|
||||
if (validationResultAplTo.errors.length != 0) {
|
||||
accept("error", validationResultAplTo.errors.join("\n"), {
|
||||
node: pair.aplTo,
|
||||
property: "apl_to_code",
|
||||
});
|
||||
}
|
||||
|
||||
let validationResultTraTo = validateWildcardTraTo(
|
||||
collectWildcard(pair.traTo.transform_to_code.split("")),
|
||||
validationResultAplTo.env
|
||||
);
|
||||
|
||||
if (validationResultTraTo.length != 0) {
|
||||
accept("error", validationResultTraTo.join("\n"), {
|
||||
node: pair.traTo,
|
||||
property: "transform_to_code",
|
||||
});
|
||||
}
|
||||
} catch (e) {}
|
||||
}
|
||||
}
|
||||
|
||||
function validateWildcardTraTo(wildcards: string[], env: string[]): string[] {
|
||||
let errors: string[] = [];
|
||||
for (let wildcard of wildcards) {
|
||||
if (!env.includes(wildcard)) {
|
||||
errors.push(
|
||||
"Wildcard " +
|
||||
wildcard +
|
||||
" Is not declared in applicable to block"
|
||||
);
|
||||
}
|
||||
}
|
||||
return errors;
|
||||
}
|
||||
|
||||
interface ValidationResultAplTo {
|
||||
env: string[];
|
||||
errors: string[];
|
||||
}
|
||||
|
||||
function validateWildcardAplTo(wildcards: string[]): ValidationResultAplTo {
|
||||
let env = [];
|
||||
let errors = [];
|
||||
for (let wildcard of wildcards) {
|
||||
let [identifier, types, ..._] = wildcard.split(":");
|
||||
env.push(identifier);
|
||||
if (_.length > 0) {
|
||||
errors.push("Too many : in wildcard");
|
||||
}
|
||||
|
||||
if (!types) {
|
||||
errors.push("No types given for wildcard " + identifier);
|
||||
}
|
||||
}
|
||||
return { env, errors };
|
||||
}
|
||||
|
||||
function collectWildcard(code: string[]): string[] {
|
||||
let flag = false;
|
||||
let wildcards: string[] = [];
|
||||
let wildcard = "";
|
||||
for (let i = 0; i < code.length; i++) {
|
||||
if (i != code.length && code[i] === ">" && code[i + 1] === ">") {
|
||||
flag = false;
|
||||
wildcards.push(wildcard.replace(/\s/g, ""));
|
||||
wildcard = "";
|
||||
i += 1;
|
||||
}
|
||||
if (flag) {
|
||||
wildcard += code[i];
|
||||
}
|
||||
|
||||
if (i != code.length - 1 && code[i] === "<" && code[i + 1] === "<") {
|
||||
flag = true;
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
return wildcards;
|
||||
}
|
27
JSTQL/src/language/jstql.langium
Normal file
27
JSTQL/src/language/jstql.langium
Normal file
|
@ -0,0 +1,27 @@
|
|||
grammar Jstql
|
||||
|
||||
entry Model:
|
||||
(proposals+=Proposal)*;
|
||||
|
||||
Proposal:
|
||||
'proposal' name=ID "{"
|
||||
(case+=Case)+
|
||||
"}";
|
||||
|
||||
Case:
|
||||
"case" name=ID "{"
|
||||
aplTo=ApplicableTo
|
||||
traTo=TraTo
|
||||
"}";
|
||||
|
||||
ApplicableTo:
|
||||
"applicable" "to" "{"
|
||||
apl_to_code=STRING
|
||||
"}";
|
||||
TraTo:
|
||||
"transform" "to" "{"
|
||||
transform_to_code=STRING
|
||||
"}";
|
||||
hidden terminal WS: /\s+/;
|
||||
terminal ID: /[_a-zA-Z][\w_]*/;
|
||||
terminal STRING: /"[^"]*"|'[^']*'/;
|
13
JSTQL/src/language/main.ts
Normal file
13
JSTQL/src/language/main.ts
Normal file
|
@ -0,0 +1,13 @@
|
|||
import { startLanguageServer } from 'langium';
|
||||
import { NodeFileSystem } from 'langium/node';
|
||||
import { createConnection, ProposedFeatures } from 'vscode-languageserver/node.js';
|
||||
import { createJstqlServices } from './jstql-module.js';
|
||||
|
||||
// Create a connection to the client
|
||||
const connection = createConnection(ProposedFeatures.all);
|
||||
|
||||
// Inject the shared services and language-specific services
|
||||
const { shared } = createJstqlServices({ connection, ...NodeFileSystem });
|
||||
|
||||
// Start the language server with the shared services
|
||||
startLanguageServer(shared);
|
24
JSTQL/tsconfig.json
Normal file
24
JSTQL/tsconfig.json
Normal file
|
@ -0,0 +1,24 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2017",
|
||||
"module": "Node16",
|
||||
"lib": ["ESNext"],
|
||||
"sourceMap": true,
|
||||
"outDir": "out",
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noImplicitReturns": true,
|
||||
"noImplicitOverride": true,
|
||||
"moduleResolution": "Node16",
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true
|
||||
},
|
||||
"include": [
|
||||
"src/**/*.ts"
|
||||
],
|
||||
"exclude": [
|
||||
"out",
|
||||
"node_modules"
|
||||
]
|
||||
}
|
BIN
bun.lockb
BIN
bun.lockb
Binary file not shown.
19
demo-site/index.html
Normal file
19
demo-site/index.html
Normal file
|
@ -0,0 +1,19 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<title>Page Title</title>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<style>
|
||||
body {
|
||||
font-family: Arial, Helvetica, sans-serif;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<h1>Demo site</h1>
|
||||
<p></p>
|
||||
|
||||
</body>
|
||||
</html>
|
17
dsl_files/awaitToPromise.jstql
Normal file
17
dsl_files/awaitToPromise.jstql
Normal file
|
@ -0,0 +1,17 @@
|
|||
proposal awaitToPomise{
|
||||
case single{
|
||||
applicable to {
|
||||
"let <<ident:Identifier>> = await <<awaitedExpr: Expression>>;
|
||||
<<statements: (Statement && !ReturnStatement && !ContinueStatement &&!BreakStatement)+>>
|
||||
return <<returnExpr: Expression>>
|
||||
"
|
||||
}
|
||||
|
||||
transform to{
|
||||
"return <<awaitedExpr>>.then(async <<ident>> => {
|
||||
<<statements>>
|
||||
return <<returnExpr>>
|
||||
});"
|
||||
}
|
||||
}
|
||||
}
|
33
dsl_files/do.jstql
Normal file
33
dsl_files/do.jstql
Normal file
|
@ -0,0 +1,33 @@
|
|||
proposal DoExpression{
|
||||
case arrowFunction{
|
||||
applicable to {
|
||||
"() => {
|
||||
<<statements: (Statement && !ReturnStatement)+>>
|
||||
return <<returnVal : Expression>>;
|
||||
}
|
||||
"
|
||||
}
|
||||
transform to {
|
||||
"(do {
|
||||
<<statements>>
|
||||
<<returnVal>>
|
||||
})"
|
||||
}
|
||||
}
|
||||
|
||||
case immediatelyInvokedUnnamedFunction {
|
||||
applicable to {
|
||||
"(function(){
|
||||
<<statements: (Statement && !ReturnStatement)+>>
|
||||
return <<returnVal : Expression>>;
|
||||
})();"
|
||||
}
|
||||
|
||||
transform to {
|
||||
"(do {
|
||||
<<statements>>
|
||||
<<returnVal>>
|
||||
})"
|
||||
}
|
||||
}
|
||||
}
|
16
dsl_files/multi_stmt_test.jstql
Normal file
16
dsl_files/multi_stmt_test.jstql
Normal file
|
@ -0,0 +1,16 @@
|
|||
proposal MultiStmt{
|
||||
case Smthn{
|
||||
applicable to{
|
||||
"let <<ident1:Identifier>> = <<funcIdent:Identifier | MemberExpression>>();
|
||||
let <<ident2:Identifier>> = <<expr:Expression>>;
|
||||
"
|
||||
}
|
||||
|
||||
transform to {
|
||||
"const ident2 = () => {
|
||||
let <<ident1>> = <<funcIdent>>();
|
||||
return <<expr>>;
|
||||
}"
|
||||
}
|
||||
}
|
||||
}
|
21
dsl_files/pipeline.jstql
Normal file
21
dsl_files/pipeline.jstql
Normal file
|
@ -0,0 +1,21 @@
|
|||
proposal Pipeline{
|
||||
|
||||
case SingleArgument {
|
||||
applicable to {
|
||||
"<<someFunctionIdent:Identifier || MemberExpression>>(<<someFunctionParam: Expression>>);"
|
||||
}
|
||||
|
||||
transform to {
|
||||
"<<someFunctionParam>> |> <<someFunctionIdent>>(%);"
|
||||
}
|
||||
}
|
||||
|
||||
case TwoArgument{
|
||||
applicable to {
|
||||
"<<someFunctionIdent: Identifier || MemberExpression>>(<<someFunctionParam: Expression>>, <<moreFunctionParam: Expression>>)"
|
||||
}
|
||||
transform to {
|
||||
"<<someFunctionParam>> |> <<someFunctionIdent>>(%, <<moreFunctionParam>>)"
|
||||
}
|
||||
}
|
||||
}
|
14
dsl_files/pipelineDeep.jstql
Normal file
14
dsl_files/pipelineDeep.jstql
Normal file
|
@ -0,0 +1,14 @@
|
|||
proposal Pipeline{
|
||||
|
||||
case SingleArgument {
|
||||
applicable to {
|
||||
"<<someFunctionIdent:Identifier || MemberExpression>>(<<otherFunctionIdent:Identifier || MemberExpression>>(<<arggg:Identifier>>));"
|
||||
}
|
||||
|
||||
transform to {
|
||||
"(<<arggg>> |> <<otherFunctionIdent>>(%)) |> <<someFunctionIdent>>(%);"
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
17
dsl_files/star.jstql
Normal file
17
dsl_files/star.jstql
Normal file
|
@ -0,0 +1,17 @@
|
|||
proposal Star{
|
||||
case a {
|
||||
applicable to {
|
||||
"let <<ident:Identifier>> = () => {
|
||||
<<statements: Statement>>
|
||||
return <<returnVal : Expression>>;
|
||||
}
|
||||
"
|
||||
}
|
||||
transform to {
|
||||
"let <<ident>> = do {
|
||||
<<statements>>
|
||||
<<returnVal>>
|
||||
}"
|
||||
}
|
||||
}
|
||||
}
|
10
dsl_files/test_single_stmt.jstql
Normal file
10
dsl_files/test_single_stmt.jstql
Normal file
|
@ -0,0 +1,10 @@
|
|||
proposal test_single_stmt{
|
||||
pair one {
|
||||
applicable to {
|
||||
"let <<aaaa: Identifier >> = <<bbbb: Expression | Identifier>>"
|
||||
}
|
||||
transform to {
|
||||
"let <<aaaa>> = 1 + <<bbbb>>;"
|
||||
}
|
||||
}
|
||||
}
|
21
grammars/wildcard_grammar.txt
Normal file
21
grammars/wildcard_grammar.txt
Normal file
|
@ -0,0 +1,21 @@
|
|||
|
||||
Wildcard:
|
||||
Identifier ":" TypeExpr ("*"?)
|
||||
TypeExpr:
|
||||
BinaryExpr
|
||||
| UnaryExpr
|
||||
| PrimitiveExpr
|
||||
|
||||
BinaryExpr:
|
||||
TypeExpr { Operator TypeExpr }*
|
||||
|
||||
UnaryExpr:
|
||||
{UnaryOperator}? TypeExpr
|
||||
|
||||
PrimitiveExpr:
|
||||
GroupExpr | Identifier
|
||||
|
||||
GroupExpr:
|
||||
"(" TypeExpr ")"
|
||||
|
||||
|
94
output_files/output.js
Normal file
94
output_files/output.js
Normal file
|
@ -0,0 +1,94 @@
|
|||
function parse() {
|
||||
const input = ("input" |> document.getElementById(%)).value;
|
||||
const data = 32 |> input.slice(%);
|
||||
const compressedData = data |> decode_base64(%);
|
||||
const uncompressed = pako.inflate(compressedData, {
|
||||
to: "string"
|
||||
});
|
||||
const json = uncompressed |> JSON.parse(%);
|
||||
json |> console.log(%);
|
||||
json |> convertToDesktop(%);
|
||||
}
|
||||
function convertToDesktop(json) {
|
||||
const newValues = {
|
||||
crb: false,
|
||||
newClanRaidClassId: 0,
|
||||
newClanRaidClassLevel: 0,
|
||||
pendingImmortalSouls: 0,
|
||||
pendingRaidRubies: 0,
|
||||
immortalSouls: 0,
|
||||
lastPurchaseTime: 0,
|
||||
lastRaidAttemptTimestamp: 0,
|
||||
lastRaidRewardCheckTimestamp: 0,
|
||||
shouldShowHZERoster: false,
|
||||
lastBonusRewardCheckTimestamp: 0
|
||||
};
|
||||
const mappedValues = {
|
||||
rubies: json.rubies / 10 |> Math.round(%)
|
||||
};
|
||||
const pcSpecificValues = {
|
||||
readPatchNumber: "1.0e12",
|
||||
saveOrigin: "pc"
|
||||
};
|
||||
const hash = "7a990d405d2c6fb93aa8fbb0ec1a3b23";
|
||||
const newData = {
|
||||
...newValues,
|
||||
...json,
|
||||
...mappedValues,
|
||||
...pcSpecificValues
|
||||
};
|
||||
const compressed = pako.deflate(newData |> JSON.stringify(%), {
|
||||
to: "string"
|
||||
});
|
||||
const base64 = compressed |> btoa(%);
|
||||
const finalSaveString = hash + base64;
|
||||
("output_output" |> document.getElementById(%)).innerText = finalSaveString;
|
||||
showOutput();
|
||||
}
|
||||
function showOutput() {
|
||||
("outputs" |> document.getElementById(%)).style.visibility = "visible";
|
||||
}
|
||||
function copyOutput() {
|
||||
const output = "output_output" |> document.getElementById(%);
|
||||
output.disabled = false;
|
||||
output.focus();
|
||||
output.select();
|
||||
"copy" |> document.execCommand(%);
|
||||
output.disabled = true;
|
||||
const successElement = "copy_success_msg" |> document.getElementById(%);
|
||||
successElement.style.visibility = "visible";
|
||||
setTimeout(() => successElement.style.visibility = "hidden", 4000);
|
||||
}
|
||||
function decode_base64(s) {
|
||||
let e = {},
|
||||
i,
|
||||
k,
|
||||
v = [],
|
||||
r = "",
|
||||
w = String.fromCharCode;
|
||||
let n = [[65, 91], [97, 123], [48, 58], [43, 44], [47, 48]];
|
||||
for (z in n) {
|
||||
for (i = n[z][0]; i < n[z][1]; i++) {
|
||||
i |> w(%) |> v.push(%);
|
||||
}
|
||||
}
|
||||
for (i = 0; i < 64; i++) {
|
||||
e[v[i]] = i;
|
||||
}
|
||||
for (i = 0; i < s.length; i += 72) {
|
||||
let b = 0,
|
||||
c,
|
||||
x,
|
||||
l = 0,
|
||||
o = s.substring(i, i + 72);
|
||||
for (x = 0; x < o.length; x++) {
|
||||
c = e[x |> o.charAt(%)];
|
||||
b = (b << 6) + c;
|
||||
l += 6;
|
||||
while (l >= 8) {
|
||||
r += (b >>> (l -= 8)) % 256 |> w(%);
|
||||
}
|
||||
}
|
||||
}
|
||||
return r;
|
||||
}
|
9
output_files/output_await_to_promise.js
Normal file
9
output_files/output_await_to_promise.js
Normal file
|
@ -0,0 +1,9 @@
|
|||
async function something() {
|
||||
let a = 100;
|
||||
a *= 100000;
|
||||
return fetch("https://uib.no").then(uib => {
|
||||
a += 100000;
|
||||
a -= 1000;
|
||||
return [a, uib];
|
||||
});
|
||||
}
|
12
output_files/output_do.js
Normal file
12
output_files/output_do.js
Normal file
|
@ -0,0 +1,12 @@
|
|||
let aaaa = do {
|
||||
let g = 100;
|
||||
let ff = 10;
|
||||
let ggg = a(b);
|
||||
100
|
||||
};
|
||||
var bbaaa = do {
|
||||
let lllll = 1 + 1;
|
||||
100 + 100;
|
||||
const aaaaa = aaaa(bb);
|
||||
lllll
|
||||
};
|
2
output_files/output_multi.js
Normal file
2
output_files/output_multi.js
Normal file
|
@ -0,0 +1,2 @@
|
|||
let ThisTest = LOOOOOOOOL();
|
||||
let HAHHAHAH = 1 + 1;
|
4
output_files/output_pipeline.js
Normal file
4
output_files/output_pipeline.js
Normal file
|
@ -0,0 +1,4 @@
|
|||
a |> w(%) |> w(%) |> w(%) |> w(%) |> w(%) |> w(%) |> w(%) |> w(%) |> w(%) |> w(%);
|
||||
a |> b(%, a |> b(%, a |> b(%, a |> b(%, a |> b(%, a |> b(%, a |> b(%, b)))))));
|
||||
a |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a);
|
||||
b(b(b(b(a, a, a), a, a), a, a), a, a);
|
4
output_files/pipeline_out.js
Normal file
4
output_files/pipeline_out.js
Normal file
|
@ -0,0 +1,4 @@
|
|||
a |> w(%) |> w(%) |> w(%) |> w(%) |> w(%) |> w(%) |> w(%) |> w(%) |> w(%) |> w(%);
|
||||
a |> b(%, a |> b(%, a |> b(%, a |> b(%, a |> b(%, a |> b(%, a |> b(%, b)))))));
|
||||
a |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a);
|
||||
b(b(b(b(a, a, a), a, a), a, a), a, a);
|
104
output_files/test2.js
Normal file
104
output_files/test2.js
Normal file
|
@ -0,0 +1,104 @@
|
|||
return awaitedExpr.then(async _geometry => {
|
||||
const _intersectPoint = /*@__PURE__*/new Vector3();
|
||||
const _worldScale = /*@__PURE__*/new Vector3();
|
||||
const _mvPosition = /*@__PURE__*/new Vector3();
|
||||
const _alignedPosition = /*@__PURE__*/new Vector2();
|
||||
const _rotatedPosition = /*@__PURE__*/new Vector2();
|
||||
const _viewWorldMatrix = /*@__PURE__*/new Matrix4();
|
||||
const _vA = /*@__PURE__*/new Vector3();
|
||||
const _vB = /*@__PURE__*/new Vector3();
|
||||
const _vC = /*@__PURE__*/new Vector3();
|
||||
const _uvA = /*@__PURE__*/new Vector2();
|
||||
const _uvB = /*@__PURE__*/new Vector2();
|
||||
const _uvC = /*@__PURE__*/new Vector2();
|
||||
class Sprite extends Object3D {
|
||||
constructor(material = new SpriteMaterial()) {
|
||||
super();
|
||||
this.isSprite = true;
|
||||
this.type = "Sprite";
|
||||
if (_geometry === undefined) {
|
||||
_geometry = new BufferGeometry();
|
||||
const float32Array = new Float32Array([-0.5, -0.5, 0, 0, 0, 0.5, -0.5, 0, 1, 0, 0.5, 0.5, 0, 1, 1, -0.5, 0.5, 0, 0, 1]);
|
||||
const interleavedBuffer = new InterleavedBuffer(float32Array, 5);
|
||||
_geometry.setIndex([0, 1, 2, 0, 2, 3]);
|
||||
_geometry.setAttribute("position", new InterleavedBufferAttribute(interleavedBuffer, 3, 0, false));
|
||||
_geometry.setAttribute("uv", new InterleavedBufferAttribute(interleavedBuffer, 2, 3, false));
|
||||
}
|
||||
this.geometry = _geometry;
|
||||
this.material = material;
|
||||
this.center = new Vector2(0.5, 0.5);
|
||||
}
|
||||
raycast(raycaster, intersects) {
|
||||
if (raycaster.camera === null) {
|
||||
console.error('THREE.Sprite: "Raycaster.camera" needs to be set in order to raycast against sprites.');
|
||||
}
|
||||
_worldScale.setFromMatrixScale(this.matrixWorld);
|
||||
_viewWorldMatrix.copy(raycaster.camera.matrixWorld);
|
||||
this.modelViewMatrix.multiplyMatrices(raycaster.camera.matrixWorldInverse, this.matrixWorld);
|
||||
_mvPosition.setFromMatrixPosition(this.modelViewMatrix);
|
||||
if (raycaster.camera.isPerspectiveCamera && this.material.sizeAttenuation === false) {
|
||||
_worldScale.multiplyScalar(-_mvPosition.z);
|
||||
}
|
||||
const rotation = this.material.rotation;
|
||||
let sin, cos;
|
||||
if (rotation !== 0) {
|
||||
cos = Math.cos(rotation);
|
||||
sin = Math.sin(rotation);
|
||||
}
|
||||
const center = this.center;
|
||||
transformVertex(_vA.set(-0.5, -0.5, 0), _mvPosition, center, _worldScale, sin, cos);
|
||||
transformVertex(_vB.set(0.5, -0.5, 0), _mvPosition, center, _worldScale, sin, cos);
|
||||
transformVertex(_vC.set(0.5, 0.5, 0), _mvPosition, center, _worldScale, sin, cos);
|
||||
_uvA.set(0, 0);
|
||||
_uvB.set(1, 0);
|
||||
_uvC.set(1, 1);
|
||||
|
||||
// check first triangle
|
||||
let intersect = raycaster.ray.intersectTriangle(_vA, _vB, _vC, false, _intersectPoint);
|
||||
if (intersect === null) {
|
||||
// check second triangle
|
||||
transformVertex(_vB.set(-0.5, 0.5, 0), _mvPosition, center, _worldScale, sin, cos);
|
||||
_uvB.set(0, 1);
|
||||
intersect = raycaster.ray.intersectTriangle(_vA, _vC, _vB, false, _intersectPoint);
|
||||
if (intersect === null) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
const distance = raycaster.ray.origin.distanceTo(_intersectPoint);
|
||||
if (distance < raycaster.near || distance > raycaster.far) return;
|
||||
intersects.push({
|
||||
distance: distance,
|
||||
point: _intersectPoint.clone(),
|
||||
uv: Triangle.getInterpolation(_intersectPoint, _vA, _vB, _vC, _uvA, _uvB, _uvC, new Vector2()),
|
||||
face: null,
|
||||
object: this
|
||||
});
|
||||
}
|
||||
copy(source, recursive) {
|
||||
super.copy(source, recursive);
|
||||
if (source.center !== undefined) this.center.copy(source.center);
|
||||
this.material = source.material;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
function transformVertex(vertexPosition, mvPosition, center, scale, sin, cos) {
|
||||
// compute position in camera space
|
||||
_alignedPosition.subVectors(vertexPosition, center).addScalar(0.5).multiply(scale);
|
||||
|
||||
// to check if rotation is not zero
|
||||
if (sin !== undefined) {
|
||||
_rotatedPosition.x = cos * _alignedPosition.x - sin * _alignedPosition.y;
|
||||
_rotatedPosition.y = sin * _alignedPosition.x + cos * _alignedPosition.y;
|
||||
} else {
|
||||
_rotatedPosition.copy(_alignedPosition);
|
||||
}
|
||||
vertexPosition.copy(mvPosition);
|
||||
vertexPosition.x += _rotatedPosition.x;
|
||||
vertexPosition.y += _rotatedPosition.y;
|
||||
|
||||
// transform to world space
|
||||
vertexPosition.applyMatrix4(_viewWorldMatrix);
|
||||
}
|
||||
export { Sprite };
|
||||
return returnExpr;
|
||||
});
|
94
output_files/test_2.js
Normal file
94
output_files/test_2.js
Normal file
|
@ -0,0 +1,94 @@
|
|||
function parse() {
|
||||
const input = ("input" |> document.getElementById(%)).value;
|
||||
const data = 32 |> input.slice(%);
|
||||
const compressedData = data |> decode_base64(%);
|
||||
const uncompressed = compressedData |> pako.inflate(%, {
|
||||
to: "string"
|
||||
});
|
||||
const json = uncompressed |> JSON.parse(%);
|
||||
json |> console.log(%);
|
||||
json |> convertToDesktop(%);
|
||||
}
|
||||
function convertToDesktop(json) {
|
||||
const newValues = {
|
||||
crb: false,
|
||||
newClanRaidClassId: 0,
|
||||
newClanRaidClassLevel: 0,
|
||||
pendingImmortalSouls: 0,
|
||||
pendingRaidRubies: 0,
|
||||
immortalSouls: 0,
|
||||
lastPurchaseTime: 0,
|
||||
lastRaidAttemptTimestamp: 0,
|
||||
lastRaidRewardCheckTimestamp: 0,
|
||||
shouldShowHZERoster: false,
|
||||
lastBonusRewardCheckTimestamp: 0
|
||||
};
|
||||
const mappedValues = {
|
||||
rubies: json.rubies / 10 |> Math.round(%)
|
||||
};
|
||||
const pcSpecificValues = {
|
||||
readPatchNumber: "1.0e12",
|
||||
saveOrigin: "pc"
|
||||
};
|
||||
const hash = "7a990d405d2c6fb93aa8fbb0ec1a3b23";
|
||||
const newData = {
|
||||
...newValues,
|
||||
...json,
|
||||
...mappedValues,
|
||||
...pcSpecificValues
|
||||
};
|
||||
const compressed = newData |> JSON.stringify(%) |> pako.deflate(%, {
|
||||
to: "string"
|
||||
});
|
||||
const base64 = compressed |> btoa(%);
|
||||
const finalSaveString = hash + base64;
|
||||
("output_output" |> document.getElementById(%)).innerText = finalSaveString;
|
||||
showOutput();
|
||||
}
|
||||
function showOutput() {
|
||||
("outputs" |> document.getElementById(%)).style.visibility = "visible";
|
||||
}
|
||||
function copyOutput() {
|
||||
const output = "output_output" |> document.getElementById(%);
|
||||
output.disabled = false;
|
||||
output.focus();
|
||||
output.select();
|
||||
"copy" |> document.execCommand(%);
|
||||
output.disabled = true;
|
||||
const successElement = "copy_success_msg" |> document.getElementById(%);
|
||||
successElement.style.visibility = "visible";
|
||||
(() => successElement.style.visibility = "hidden") |> setTimeout(%, 4000);
|
||||
}
|
||||
function decode_base64(s) {
|
||||
let e = {},
|
||||
i,
|
||||
k,
|
||||
v = [],
|
||||
r = "",
|
||||
w = String.fromCharCode;
|
||||
let n = [[65, 91], [97, 123], [48, 58], [43, 44], [47, 48]];
|
||||
for (z in n) {
|
||||
for (i = n[z][0]; i < n[z][1]; i++) {
|
||||
i |> w(%) |> v.push(%);
|
||||
}
|
||||
}
|
||||
for (i = 0; i < 64; i++) {
|
||||
e[v[i]] = i;
|
||||
}
|
||||
for (i = 0; i < s.length; i += 72) {
|
||||
let b = 0,
|
||||
c,
|
||||
x,
|
||||
l = 0,
|
||||
o = i |> s.substring(%, i + 72);
|
||||
for (x = 0; x < o.length; x++) {
|
||||
c = e[x |> o.charAt(%)];
|
||||
b = (b << 6) + c;
|
||||
l += 6;
|
||||
while (l >= 8) {
|
||||
r += (b >>> (l -= 8)) % 256 |> w(%);
|
||||
}
|
||||
}
|
||||
}
|
||||
return r;
|
||||
}
|
9
output_files/testing.js
Normal file
9
output_files/testing.js
Normal file
|
@ -0,0 +1,9 @@
|
|||
async function something() {
|
||||
let a = 100;
|
||||
a *= 100000;
|
||||
return fetch("https://uib.no").then(uib => {
|
||||
a += 100000;
|
||||
a -= 1000;
|
||||
return [a, uib];
|
||||
});
|
||||
}
|
98
output_files/testingLOL.js
Normal file
98
output_files/testingLOL.js
Normal file
|
@ -0,0 +1,98 @@
|
|||
// "fast-glob" and `createTwoFilesPatch` are bundled here since the API uses `micromatch` and `diff` too
|
||||
import { createTwoFilesPatch } from "diff/lib/patch/create.js";
|
||||
import fastGlob from "fast-glob";
|
||||
import * as vnopts from "vnopts";
|
||||
import * as errors from "./common/errors.js";
|
||||
import getFileInfoWithoutPlugins from "./common/get-file-info.js";
|
||||
import mockable from "./common/mockable.js";
|
||||
import { clearCache as clearConfigCache, resolveConfig, resolveConfigFile } from "./config/resolve-config.js";
|
||||
import * as core from "./main/core.js";
|
||||
import { formatOptionsHiddenDefaults } from "./main/normalize-format-options.js";
|
||||
import normalizeOptions from "./main/normalize-options.js";
|
||||
import * as optionCategories from "./main/option-categories.js";
|
||||
import { clearCache as clearPluginCache, loadBuiltinPlugins, loadPlugins } from "./main/plugins/index.js";
|
||||
import { getSupportInfo as getSupportInfoWithoutPlugins, normalizeOptionSettings } from "./main/support.js";
|
||||
import { createIsIgnoredFunction } from "./utils/ignore.js";
|
||||
import isNonEmptyArray from "./utils/is-non-empty-array.js";
|
||||
import omit from "./utils/object-omit.js";
|
||||
import partition from "./utils/partition.js";
|
||||
|
||||
/**
|
||||
* @param {*} fn
|
||||
* @param {number} [optionsArgumentIndex]
|
||||
* @returns {*}
|
||||
*/
|
||||
function withPlugins(fn, optionsArgumentIndex = 1 // Usually `options` is the 2nd argument
|
||||
) {
|
||||
return async (...args) => {
|
||||
const options = args[optionsArgumentIndex] ?? {};
|
||||
const {
|
||||
plugins = []
|
||||
} = options;
|
||||
args[optionsArgumentIndex] = {
|
||||
...options,
|
||||
plugins: (await ([loadBuiltinPlugins(), plugins |> loadPlugins(%)] |> Promise.all(%))).flat()
|
||||
};
|
||||
return fn(...args);
|
||||
};
|
||||
}
|
||||
const formatWithCursor = core.formatWithCursor |> withPlugins(%);
|
||||
async function format(text, options) {
|
||||
const {
|
||||
formatted
|
||||
} = await (text |> formatWithCursor(%, {
|
||||
...options,
|
||||
cursorOffset: -1
|
||||
}));
|
||||
return formatted;
|
||||
}
|
||||
async function check(text, options) {
|
||||
return (await (text |> format(%, options))) === text;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line require-await
|
||||
async function clearCache() {
|
||||
clearConfigCache();
|
||||
clearPluginCache();
|
||||
}
|
||||
|
||||
/** @type {typeof getFileInfoWithoutPlugins} */
|
||||
const getFileInfo = getFileInfoWithoutPlugins |> withPlugins(%);
|
||||
|
||||
/** @type {typeof getSupportInfoWithoutPlugins} */
|
||||
const getSupportInfo = getSupportInfoWithoutPlugins |> withPlugins(%, 0);
|
||||
|
||||
// Internal shared with cli
|
||||
const sharedWithCli = {
|
||||
errors,
|
||||
optionCategories,
|
||||
createIsIgnoredFunction,
|
||||
formatOptionsHiddenDefaults,
|
||||
normalizeOptions,
|
||||
getSupportInfoWithoutPlugins,
|
||||
normalizeOptionSettings,
|
||||
vnopts: {
|
||||
ChoiceSchema: vnopts.ChoiceSchema,
|
||||
apiDescriptor: vnopts.apiDescriptor
|
||||
},
|
||||
fastGlob,
|
||||
createTwoFilesPatch,
|
||||
utils: {
|
||||
isNonEmptyArray,
|
||||
partition,
|
||||
omit
|
||||
},
|
||||
mockable
|
||||
};
|
||||
const debugApis = {
|
||||
parse: core.parse |> withPlugins(%),
|
||||
formatAST: core.formatAst |> withPlugins(%),
|
||||
formatDoc: core.formatDoc |> withPlugins(%),
|
||||
printToDoc: core.printToDoc |> withPlugins(%),
|
||||
printDocToString: core.printDocToString |> withPlugins(%),
|
||||
mockable
|
||||
};
|
||||
export { debugApis as __debug, sharedWithCli as __internal, check, clearCache as clearConfigCache, format, formatWithCursor, getFileInfo, getSupportInfo, resolveConfig, resolveConfigFile };
|
||||
export * as doc from "./document/public.js";
|
||||
export { default as version } from "./main/version.evaluate.cjs";
|
||||
export * as util from "./utils/public.js";
|
810
package-lock.json
generated
810
package-lock.json
generated
File diff suppressed because it is too large
Load diff
18
package.json
18
package.json
|
@ -2,14 +2,28 @@
|
|||
"name": "didactic-chainsaw",
|
||||
"module": "index.ts",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"": "bun run src/index.ts",
|
||||
"watch": "bun --watch src/index.ts"
|
||||
},
|
||||
"devDependencies": {
|
||||
"bun-types": "latest",
|
||||
"@babel/plugin-proposal-pipeline-operator": "^7.23.3",
|
||||
"@babel/plugin-syntax-top-level-await": "^7.14.5",
|
||||
"@swc/cli": "^0.1.62",
|
||||
"@swc/core": "^1.3.83",
|
||||
"@types/babel__generator": "^7.6.8",
|
||||
"@types/node": "^20.5.9",
|
||||
"bun-types": "latest",
|
||||
"typescript": "^5.2.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"@babel/core": "^7.23.7",
|
||||
"@babel/generator": "^7.23.0",
|
||||
"@babel/parser": "^7.23.0",
|
||||
"@babel/traverse": "^7.23.0",
|
||||
"@types/babel__traverse": "^7.20.5",
|
||||
"@types/babel-traverse": "^6.25.10",
|
||||
"babel": "^6.23.0",
|
||||
"bun": "^1.0.4",
|
||||
"ts-node": "^10.9.1"
|
||||
}
|
||||
}
|
||||
|
|
0
src/babel.config.json
Normal file
0
src/babel.config.json
Normal file
65
src/data_structures/tree.ts
Normal file
65
src/data_structures/tree.ts
Normal file
|
@ -0,0 +1,65 @@
|
|||
import * as babelparser from "@babel/parser";
|
||||
|
||||
import traverse from "@babel/traverse";
|
||||
import * as t from "@babel/types";
|
||||
import { PairedNodes } from "../matcher/matcher";
|
||||
|
||||
export class TreeNode<T> {
|
||||
public parent: TreeNode<T> | null;
|
||||
public element: T;
|
||||
public children: TreeNode<T>[] = [];
|
||||
|
||||
constructor(parent: TreeNode<T> | null, element: T) {
|
||||
this.parent = parent;
|
||||
this.element = element;
|
||||
if (this.parent) this.parent.children.push(this);
|
||||
}
|
||||
}
|
||||
|
||||
export const makeTree = (
|
||||
ast: babelparser.ParseResult<t.File>
|
||||
): TreeNode<t.Node> | undefined => {
|
||||
let last: TreeNode<t.Node> | null = null;
|
||||
|
||||
let first: TreeNode<t.Node> | null = null;
|
||||
traverse(ast, {
|
||||
enter(path: any) {
|
||||
let node: TreeNode<t.Node> = new TreeNode<t.Node>(
|
||||
last,
|
||||
path.node as t.Node
|
||||
);
|
||||
|
||||
if (last == null) {
|
||||
first = node;
|
||||
}
|
||||
last = node;
|
||||
},
|
||||
exit(path: any) {
|
||||
if (last && last?.element?.type != "Program") {
|
||||
last = last.parent;
|
||||
}
|
||||
},
|
||||
});
|
||||
if (first != null) {
|
||||
return first;
|
||||
}
|
||||
};
|
||||
export const showTree = (tree: TreeNode<t.Node>, idents: number = 0) => {
|
||||
console.log(" ".repeat(idents) + tree.element?.type);
|
||||
tree.children.forEach((child) => {
|
||||
showTree(child, idents + 1);
|
||||
});
|
||||
};
|
||||
export const showTreePaired = (
|
||||
tree: TreeNode<PairedNodes>,
|
||||
idents: number = 0
|
||||
) => {
|
||||
console.log(
|
||||
" ".repeat(idents),
|
||||
tree.element.aplToNode.type,
|
||||
tree.element.codeNode.map((x) => x.type)
|
||||
);
|
||||
tree.children.forEach((child) => {
|
||||
showTreePaired(child, idents + 1);
|
||||
});
|
||||
};
|
146
src/index.ts
146
src/index.ts
|
@ -1,80 +1,92 @@
|
|||
import swc from "@swc/core";
|
||||
import { MatchScript } from "./types";
|
||||
//import * as babelparser from "../babel/packages/babel-parser";
|
||||
import * as babelparser from "@babel/parser";
|
||||
//import core from "../babel/packages/babel-core";
|
||||
import { parse_with_plugins } from "./parser/parse";
|
||||
import {
|
||||
SelfHostedRecipe,
|
||||
TransformRecipe,
|
||||
transform,
|
||||
} from "./transform/transform";
|
||||
import { readdir } from "node:fs/promises";
|
||||
import { parseJSTQL } from "./langium/langiumRunner";
|
||||
|
||||
const PATTERN_PATH = "src/patterns/test.json";
|
||||
const dir = "../prettier/src";
|
||||
|
||||
const path = "test_files/test2.js";
|
||||
const file = Bun.file(path);
|
||||
const codeFromFile = await file.text();
|
||||
const main = async () => {
|
||||
console.log(Bun.version);
|
||||
//transform(selfHostedTransformExampleMultiStmt, codeFromFile);
|
||||
|
||||
let inputFile = await Bun.file("src/test_files/simple.js").text();
|
||||
/*
|
||||
console.log(codeFromFile);
|
||||
const jstql_file =
|
||||
"/home/rolfmg/Coding/Master/didactic-chainsaw/dsl_files/awaitToPromise.jstql";
|
||||
const test_file = Bun.file(jstql_file);
|
||||
const test_JSTQL = await test_file.text();
|
||||
let proposals = await parseJSTQL(test_JSTQL);
|
||||
|
||||
console.log(
|
||||
"=====================\nCurrent file to be transformed : \n" +
|
||||
inputFile +
|
||||
"\n===================="
|
||||
);
|
||||
swc.parseFile("src/test_files/simple.js", {
|
||||
syntax: "ecmascript",
|
||||
jsx: false,
|
||||
let [code, count] = transform(proposals[0].cases, codeFromFile);
|
||||
await Bun.write("output_files/test2.js", code);
|
||||
return;
|
||||
*/
|
||||
let basepathExamplesJSFiles = "../next.js";
|
||||
let examples = (await readdir(basepathExamplesJSFiles, { recursive: true }))
|
||||
.filter((x) => x.endsWith(".js"))
|
||||
.map((x) => basepathExamplesJSFiles + "/" + x);
|
||||
console.log(examples);
|
||||
let result = [];
|
||||
for (let proposalFile of [
|
||||
"pipeline.jstql",
|
||||
"do.jstql",
|
||||
"awaitToPromise.jstql",
|
||||
].slice(1, 2)) {
|
||||
const jstql_file = "dsl_files/" + proposalFile;
|
||||
const test_file = Bun.file(jstql_file);
|
||||
const test_JSTQL = await test_file.text();
|
||||
let proposals = await parseJSTQL(test_JSTQL);
|
||||
|
||||
target: "es2022",
|
||||
|
||||
isModule: false,
|
||||
}).then((module) => {
|
||||
//console.log(module);
|
||||
// swc.print(module).then((o: swc.Output) => {
|
||||
// console.log(o);
|
||||
// });
|
||||
|
||||
console.log(module.body);
|
||||
|
||||
matchStatements(module).then((a) => {
|
||||
console.log(
|
||||
"================\nOutput code: \n" +
|
||||
a.code +
|
||||
"\n========================"
|
||||
);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
const matchStatements = async (module: swc.Script) => {
|
||||
const patternFile = Bun.file(PATTERN_PATH);
|
||||
const [from, to]: [MatchScript, MatchScript] = JSON.parse(
|
||||
await patternFile.text()
|
||||
);
|
||||
return await swc.printSync(match(from, to, module));
|
||||
};
|
||||
|
||||
const match = (from: any, to: any, module: swc.Script): swc.Script => {
|
||||
console.log(to);
|
||||
console.log(module);
|
||||
console.log(from);
|
||||
|
||||
for (const obj of module.body) {
|
||||
let allPresent = true;
|
||||
for (const key in obj) {
|
||||
if (!(key in from)) {
|
||||
allPresent = false;
|
||||
}
|
||||
}
|
||||
if (allPresent) {
|
||||
console.log("Found first match!");
|
||||
for (const [key, val] of Object.entries(obj)) {
|
||||
match(from["key"], to, val);
|
||||
let sum = 0;
|
||||
let failures = 0;
|
||||
let filesSucceeded = 0;
|
||||
console.log("Scripts found ", sum, "matches!");
|
||||
let count = 0;
|
||||
for (let examplesFile of examples) {
|
||||
try {
|
||||
if (examplesFile.split("/").includes("compiled")) {
|
||||
//continue;
|
||||
}
|
||||
console.log(examplesFile);
|
||||
let script = await Bun.file(examplesFile).text();
|
||||
let [resultString, matches] = transform(
|
||||
proposals[0].cases,
|
||||
script
|
||||
);
|
||||
sum += matches;
|
||||
console.log(matches);
|
||||
if (matches > 0) {
|
||||
await Bun.write(
|
||||
"output_testing/" +
|
||||
count +
|
||||
examplesFile.split("/").join("_"),
|
||||
resultString
|
||||
);
|
||||
count += 1;
|
||||
}
|
||||
filesSucceeded += 1;
|
||||
} catch (e) {
|
||||
failures += 1;
|
||||
//console.log(e);
|
||||
}
|
||||
console.log("current sum", sum);
|
||||
}
|
||||
result.push(
|
||||
proposalFile + ", " + sum + ", " + count + ", " + filesSucceeded
|
||||
);
|
||||
}
|
||||
|
||||
return module;
|
||||
};
|
||||
|
||||
const matchAndReplace = (
|
||||
statement: swc.Statement,
|
||||
from: Object,
|
||||
to: Object
|
||||
) => {
|
||||
for (const [key, value] of Object.entries(from)) {
|
||||
for (let res of result) {
|
||||
console.log(res);
|
||||
}
|
||||
};
|
||||
|
||||
|
|
20
src/langium/langiumRunner.ts
Normal file
20
src/langium/langiumRunner.ts
Normal file
|
@ -0,0 +1,20 @@
|
|||
import { TransformRecipe, Proposal as LocalProp } from "../transform/transform";
|
||||
import { parseDSLtoAST } from "../../JSTQL/src/JSTQL_interface/api";
|
||||
import { Model, Case } from "../../JSTQL/src/language/generated/ast";
|
||||
|
||||
export async function parseJSTQL(jstql: string): Promise<LocalProp[]> {
|
||||
let model: Model = await parseDSLtoAST(jstql);
|
||||
let localProposals: LocalProp[] = [];
|
||||
for (let proposal of model.proposals) {
|
||||
let cases: TransformRecipe[] = [];
|
||||
|
||||
for (let singleCase of proposal.case) {
|
||||
cases.push({
|
||||
applicableTo: singleCase.aplTo.apl_to_code,
|
||||
transformTo: singleCase.traTo.transform_to_code,
|
||||
});
|
||||
}
|
||||
localProposals.push({ cases });
|
||||
}
|
||||
return localProposals;
|
||||
}
|
343
src/matcher/matcher.ts
Normal file
343
src/matcher/matcher.ts
Normal file
|
@ -0,0 +1,343 @@
|
|||
import * as t from "@babel/types";
|
||||
|
||||
import * as babelparser from "@babel/parser";
|
||||
import { TreeNode, makeTree, showTree } from "../data_structures/tree";
|
||||
import { Wildcard } from "../parser/parse";
|
||||
import generate from "@babel/generator";
|
||||
import { WildcardEvalVisitor } from "./wildcardEvaluator";
|
||||
|
||||
const keys_to_ignore = ["loc", "start", "end", "type"];
|
||||
export interface MatchedTreeNode {
|
||||
aplToNode: TreeNode<t.Node>;
|
||||
codeNode: TreeNode<t.Node>;
|
||||
}
|
||||
|
||||
export interface PairedNodes {
|
||||
aplToNode: t.Node;
|
||||
codeNode: t.Node[];
|
||||
}
|
||||
|
||||
export interface Match {
|
||||
statements: TreeNode<PairedNodes>[];
|
||||
}
|
||||
|
||||
enum MatchResult {
|
||||
MatchedWithWildcard,
|
||||
MatchedWithStarredWildcard,
|
||||
Matched,
|
||||
NoMatch,
|
||||
}
|
||||
|
||||
export function runMatch(
|
||||
code: TreeNode<t.Node>,
|
||||
applicableTo: TreeNode<t.Node>,
|
||||
internals: Wildcard[]
|
||||
): Match[] {
|
||||
// Special case for a single expression, we have to remove "ExpressionStatement" node.
|
||||
if (applicableTo.children.length === 1) {
|
||||
if (applicableTo.children[0].element.type === "ExpressionStatement") {
|
||||
let matcher = new Matcher(
|
||||
internals,
|
||||
applicableTo.children[0].children[0].element
|
||||
);
|
||||
matcher.singleExprMatcher(
|
||||
code,
|
||||
applicableTo.children[0].children[0]
|
||||
);
|
||||
return matcher.matches;
|
||||
} else {
|
||||
let matcher = new Matcher(
|
||||
internals,
|
||||
applicableTo.children[0].element
|
||||
);
|
||||
matcher.singleExprMatcher(code, applicableTo.children[0]);
|
||||
return matcher.matches;
|
||||
}
|
||||
} else {
|
||||
let matcher = new Matcher(internals, applicableTo.element);
|
||||
matcher.multiStatementMatcher(code, applicableTo);
|
||||
|
||||
return matcher.matches;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Pairs nodes of a user-code AST tree with nodes of an "applicable to"
 * template tree, honouring DSL wildcards.
 *
 * Completed matches accumulate in `matches`. A wildcard identifier in the
 * template is resolved against `internals`; a starred wildcard may consume
 * several consecutive code nodes. `aplToFull` marks the template root so
 * `singleExprMatcher` knows when to re-root the search on child nodes.
 */
export class Matcher {
    public matches: Match[];
    private internals: Wildcard[];
    private aplToFull: t.Node;
    constructor(internals: Wildcard[], aplToFull: t.Node) {
        this.matches = [];
        this.internals = internals;
        this.aplToFull = aplToFull;
    }

    /**
     * Recursively match a single-expression template against `code`.
     * Returns the paired subtree (or undefined) plus how the root compared.
     * Side effect: when called with the template root, every full match found
     * under any descendant of `code` is pushed onto `this.matches`.
     */
    singleExprMatcher(
        code: TreeNode<t.Node>,
        aplTo: TreeNode<t.Node>
    ): [TreeNode<PairedNodes> | undefined, MatchResult] {
        // If we are at start of ApplicableTo, start a new search on each of the child nodes
        if (aplTo.element === this.aplToFull) {
            // Perform a new search on all child nodes before trying to verify current node
            let temp = [];
            // If any matches bubble up from child nodes, we have to store it
            for (let code_child of code.children) {
                let [maybeChildMatch, matchResult] = this.singleExprMatcher(
                    code_child,
                    aplTo
                );
                if (maybeChildMatch) {
                    temp.push(maybeChildMatch);
                }
            }
            // Store all full matches
            this.matches.push(
                ...temp.map((x) => {
                    return {
                        statements: [x],
                    };
                })
            );
        }
        // Check if the current matches
        let curMatches = this.checkCodeNode(code.element, aplTo.element);
        let pairedCurrent: TreeNode<PairedNodes> = new TreeNode(null, {
            codeNode: [code.element],
            aplToNode: aplTo.element,
        });
        if (curMatches === MatchResult.NoMatch) {
            return [undefined, MatchResult.NoMatch];
        } else if (
            curMatches === MatchResult.MatchedWithWildcard ||
            curMatches === MatchResult.MatchedWithStarredWildcard
        ) {
            // A wildcard match consumes the whole subtree; no need to recurse.
            return [pairedCurrent, curMatches];
        }
        // At this point current does match
        // Perform a search on each of the children of both AplTo and Code.
        let i = 0;
        let aplToi = 0;
        while (aplToi < aplTo.children.length) {
            if (i >= code.children.length) {
                // Template expects more children than the code has.
                return [undefined, MatchResult.NoMatch];
            }
            let [pairedChild, childResult] = this.singleExprMatcher(
                code.children[i],
                aplTo.children[aplToi]
            );

            if (pairedChild === undefined) {
                // Failed to get a full match, so early return here
                return [undefined, MatchResult.NoMatch];
            }

            pairedChild.parent = pairedCurrent;
            pairedCurrent.children.push(pairedChild);
            if (childResult === MatchResult.MatchedWithStarredWildcard) {
                // Greedily let the starred wildcard absorb following siblings
                // for as long as they also match it.
                i += 1;
                while (i < code.children.length) {
                    let [maybeChild, starChildResult] = this.singleExprMatcher(
                        code.children[i],
                        aplTo.children[aplToi]
                    );
                    if (
                        starChildResult !=
                            MatchResult.MatchedWithStarredWildcard ||
                        maybeChild === undefined
                    ) {
                        // Step back so the outer i += 1 lands on this sibling.
                        i -= 1;
                        break;
                    }
                    pairedChild.element.codeNode.push(
                        ...maybeChild.element.codeNode
                    );
                    i += 1;
                }
            }

            i += 1;
            aplToi += 1;
        }
        if (i < code.children.length) {
            // Code has trailing children the template did not account for.
            return [undefined, MatchResult.NoMatch];
        }
        // If we are here, a full match has been found
        return [pairedCurrent, curMatches];
    }

    /**
     * Compare one code node against one template node.
     * Wildcard identifiers in the template are evaluated via
     * WildcardEvalVisitor; otherwise node types (and identifier names)
     * must agree exactly.
     */
    private checkCodeNode(codeNode: t.Node, aplToNode: t.Node): MatchResult {
        // First verify the internal DSL variables
        if (
            aplToNode.type === "ExpressionStatement" &&
            aplToNode.expression.type === "Identifier"
        ) {
            // A lone identifier statement in the template may be a wildcard.
            aplToNode = aplToNode.expression;
        }
        if (aplToNode.type === "Identifier") {
            for (let wildcard of this.internals) {
                if (aplToNode.name === wildcard.identifier.name) {
                    let visitorResult = WildcardEvalVisitor.visit(
                        wildcard.expr,
                        codeNode
                    );
                    if (visitorResult && wildcard.star) {
                        return MatchResult.MatchedWithStarredWildcard;
                    } else if (visitorResult) {
                        return MatchResult.MatchedWithWildcard;
                    }
                }
            }
        }

        if (codeNode.type != aplToNode.type) {
            return MatchResult.NoMatch;
        }

        //If not an internal DSL variable, gotta verify that the identifier is the same
        if (codeNode.type === "Identifier" && aplToNode.type === "Identifier") {
            if (codeNode.name != aplToNode.name) {
                return MatchResult.NoMatch;
            }
        }

        return codeNode.type === aplToNode.type
            ? MatchResult.Matched
            : MatchResult.NoMatch;
    }

    /**
     * Recursively visit the code tree and run the sliding-window matcher on
     * every statement list (Program / BlockStatement bodies).
     */
    multiStatementMatcher(code: TreeNode<t.Node>, aplTo: TreeNode<t.Node>) {
        if (
            code.element.type === "Program" ||
            code.element.type === "BlockStatement"
        ) {
            this.matchMultiHead(code.children, aplTo.children);
        }

        for (let code_child of code.children) {
            this.multiStatementMatcher(code_child, aplTo);
        }
    }

    /**
     * Slide a window over the code statements and try to match the whole
     * template statement sequence at each offset. Starred wildcards may
     * consume extra consecutive statements. Full matches go to this.matches.
     */
    matchMultiHead(code: TreeNode<t.Node>[], aplTo: TreeNode<t.Node>[]) {
        // Sliding window the size of aplTo
        for (let y = 0; y <= code.length - aplTo.length; y++) {
            let fullMatch = true;
            let statements: TreeNode<PairedNodes>[] = [];
            let aplToi = 0;
            let codei = 0;
            while (aplToi < aplTo.length && codei + y < code.length) {
                let [paired, matchResult] = this.exactExprMatcher(
                    code[codei + y],
                    aplTo[aplToi]
                );
                if (!paired) {
                    fullMatch = false;
                    break;
                }

                if (matchResult === MatchResult.MatchedWithStarredWildcard) {
                    // Greedily absorb subsequent statements into the star.
                    codei += 1;
                    while (codei + y < code.length) {
                        let [next, nextMatchRes] = this.exactExprMatcher(
                            code[codei + y],
                            aplTo[aplToi]
                        );
                        if (
                            !next ||
                            nextMatchRes !==
                                MatchResult.MatchedWithStarredWildcard
                        ) {
                            codei -= 1;
                            break;
                        }
                        paired.element.codeNode.push(...next.element.codeNode);
                        codei += 1;
                    }
                }

                statements.push(paired);
                aplToi += 1;
                codei += 1;
            }
            if (aplToi !== aplTo.length) {
                // Ran out of code before every template statement matched.
                fullMatch = false;
            }
            if (fullMatch) {
                this.matches.push({ statements });
            }
        }
    }
    /**
     * Match one template statement against one code statement exactly
     * (no re-rooting, unlike singleExprMatcher). Returns the paired subtree
     * or undefined, plus how the root compared.
     */
    exactExprMatcher(
        code: TreeNode<t.Node>,
        aplTo: TreeNode<t.Node>
    ): [TreeNode<PairedNodes> | undefined, MatchResult] {
        let curMatches = this.checkCodeNode(code.element, aplTo.element);

        if (curMatches === MatchResult.NoMatch) {
            return [undefined, MatchResult.NoMatch];
        }

        let paired: TreeNode<PairedNodes> = new TreeNode(null, {
            aplToNode: aplTo.element,
            codeNode: [code.element],
        });
        if (
            curMatches === MatchResult.MatchedWithStarredWildcard ||
            curMatches === MatchResult.MatchedWithWildcard
        ) {
            // Wildcard consumed the whole subtree.
            return [paired, curMatches];
        }

        let i = 0;
        let aplToi = 0;
        while (i < code.children.length && aplToi < aplTo.children.length) {
            let [pairedChild, childResult] = this.exactExprMatcher(
                code.children[i],
                aplTo.children[aplToi]
            );
            if (!pairedChild) {
                // If child is not match the entire thing is not a match;
                return [undefined, MatchResult.NoMatch];
            }

            // This is a match, so we store it
            pairedChild.parent = paired;
            paired.children.push(pairedChild);

            if (childResult === MatchResult.MatchedWithStarredWildcard) {
                i += 1;
                while (i < code.children.length) {
                    // NOTE(review): this inner star loop delegates to
                    // singleExprMatcher, while the surrounding method and
                    // matchMultiHead use exactExprMatcher. singleExprMatcher
                    // additionally re-roots searches and pushes into
                    // this.matches when aplTo.element === this.aplToFull —
                    // confirm this asymmetry is intentional.
                    let [maybeChild, starChildResult] = this.singleExprMatcher(
                        code.children[i],
                        aplTo.children[aplToi]
                    );
                    if (
                        starChildResult !=
                            MatchResult.MatchedWithStarredWildcard ||
                        maybeChild === undefined
                    ) {
                        i -= 1;
                        break;
                    }

                    pairedChild.element.codeNode.push(
                        ...maybeChild.element.codeNode
                    );
                    i += 1;
                }
            }

            i += 1;
            aplToi += 1;
        }
        // Verify it is a full match
        if (aplToi < aplTo.children.length) {
            return [undefined, MatchResult.NoMatch];
        }
        if (i < code.children.length) {
            return [undefined, MatchResult.NoMatch];
        }
        return [paired, curMatches];
    }
}
|
61
src/matcher/wildcardEvaluator.ts
Normal file
61
src/matcher/wildcardEvaluator.ts
Normal file
|
@ -0,0 +1,61 @@
|
|||
import * as t from "@babel/types";
|
||||
import {
|
||||
BinaryExpr,
|
||||
GroupExpr,
|
||||
Identifier,
|
||||
TypeExpr,
|
||||
UnaryExpr,
|
||||
Wildcard,
|
||||
WildcardNode,
|
||||
WildcardParser,
|
||||
} from "../parser/parse";
|
||||
import { WildcardTokenizer } from "../parser/wildcardTokenizer";
|
||||
|
||||
export class WildcardEvalVisitor {
|
||||
static visit(node: WildcardNode, toComp: t.Node): boolean {
|
||||
switch (node.nodeType) {
|
||||
case "BinaryExpr": {
|
||||
let cur = node as BinaryExpr;
|
||||
let left = this.visit(cur.left, toComp);
|
||||
let right = this.visit(cur.right, toComp);
|
||||
if (cur.op === "&&") {
|
||||
return left && right;
|
||||
} else {
|
||||
return left || right;
|
||||
}
|
||||
}
|
||||
case "UnaryExpr": {
|
||||
let cur = node as UnaryExpr;
|
||||
return !this.visit(cur.expr, toComp);
|
||||
}
|
||||
case "GroupExpr": {
|
||||
let cur = node as GroupExpr;
|
||||
return this.visit(cur.expr, toComp);
|
||||
}
|
||||
case "Identifier": {
|
||||
let cur = node as Identifier;
|
||||
if (cur.name === "Expression") {
|
||||
return t.isExpression(toComp);
|
||||
} else if (cur.name === "Statement") {
|
||||
return t.isStatement(toComp);
|
||||
}
|
||||
return cur.name === toComp.type;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Manual smoke test for WildcardEvalVisitor (not wired to a test runner;
// invoke by hand). Parses a wildcard expression and evaluates it against a
// synthetic `let Id;` declaration.
// NOTE(review): the input ends with `*`, but WildcardTokenizer's switch has
// no case for "*" (only "+" yields a Pluss token), so tokenize() would throw
// on this string — confirm whether the suffix should be "+".
function testWildcardEval() {
    console.log(
        WildcardEvalVisitor.visit(
            new WildcardParser(
                new WildcardTokenizer(
                    "statements:(Statement && !ReturnStatement)*"
                ).tokenize()
            ).parse().expr,
            t.variableDeclaration("let", [
                t.variableDeclarator(t.identifier("Id"), null),
            ])
        )
    );
}
|
305
src/parser/parse.ts
Normal file
305
src/parser/parse.ts
Normal file
|
@ -0,0 +1,305 @@
|
|||
import * as babelparser from "@babel/parser";
|
||||
|
||||
import * as t from "@babel/types";
|
||||
import { WildcardToken, WildcardTokenizer } from "./wildcardTokenizer";
|
||||
|
||||
/**
 * Result of stripping wildcards from an "applicable to" template:
 * the parsed wildcard declarations plus the template text with each
 * `<<...>>` block replaced by its wildcard's identifier.
 */
export interface InternalParseResult {
    prelude: Wildcard[];
    cleanedJS: string;
}
|
||||
|
||||
export function parseInternalTraTo(code: string): string {
|
||||
let cleanedJS = "";
|
||||
let temp = "";
|
||||
let flag = false;
|
||||
for (let i = 0; i < code.length; i++) {
|
||||
if (code[i] === "<" && code[i + 1] === "<") {
|
||||
// From now in we are inside of the DSL custom block
|
||||
flag = true;
|
||||
i += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (flag && code[i] === ">" && code[i + 1] === ">") {
|
||||
// We encountered a closing tag
|
||||
flag = false;
|
||||
|
||||
cleanedJS += temp;
|
||||
|
||||
i += 1;
|
||||
temp = "";
|
||||
continue;
|
||||
}
|
||||
|
||||
if (flag) {
|
||||
temp += code[i];
|
||||
} else {
|
||||
cleanedJS += code[i];
|
||||
}
|
||||
}
|
||||
return cleanedJS;
|
||||
}
|
||||
|
||||
export function parseInternalAplTo(code: string): InternalParseResult {
|
||||
let cleanedJS = "";
|
||||
let temp = "";
|
||||
let flag = false;
|
||||
let prelude: Wildcard[] = [];
|
||||
for (let i = 0; i < code.length; i++) {
|
||||
if (code[i] === "<" && code[i + 1] === "<") {
|
||||
// From now in we are inside of the DSL custom block
|
||||
flag = true;
|
||||
i += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (flag && code[i] === ">" && code[i + 1] === ">") {
|
||||
// We encountered a closing tag
|
||||
flag = false;
|
||||
let wildcard = new WildcardParser(
|
||||
new WildcardTokenizer(temp).tokenize()
|
||||
).parse();
|
||||
//wildcard.identifier.name = "_" + wildcard.identifier.name + "_";
|
||||
cleanedJS += wildcard.identifier.name;
|
||||
|
||||
prelude.push(wildcard);
|
||||
i += 1;
|
||||
temp = "";
|
||||
continue;
|
||||
}
|
||||
|
||||
if (flag) {
|
||||
temp += code[i];
|
||||
} else {
|
||||
cleanedJS += code[i];
|
||||
}
|
||||
}
|
||||
return { prelude, cleanedJS };
|
||||
}
|
||||
/** A bare name in a wildcard type expression (e.g. `Statement`). */
export interface Identifier extends WildcardNode {
    nodeType: "Identifier";
    name: string;
}

/** A full wildcard declaration: `identifier : expr` with optional star/plus. */
export interface Wildcard {
    nodeType: "Wildcard";
    identifier: Identifier;
    expr: TypeExpr;
    star: boolean;
}

/** Base of all wildcard-expression AST nodes; discriminated by `nodeType`. */
export interface WildcardNode {
    nodeType: "BinaryExpr" | "UnaryExpr" | "GroupExpr" | "Identifier";
}

/** Any type expression: a binary combination, a negation, or a primitive. */
export type TypeExpr = BinaryExpr | UnaryExpr | PrimitiveExpr;

/** Binary combinators over type predicates. */
export type BinaryOperator = "||" | "&&";

/** The only unary operator: negation. */
export type UnaryOperator = "!";

/** `left op right` — combines two type predicates. */
export interface BinaryExpr extends WildcardNode {
    nodeType: "BinaryExpr";
    left: UnaryExpr | BinaryExpr | PrimitiveExpr;
    op: BinaryOperator;
    right: UnaryExpr | BinaryExpr | PrimitiveExpr;
}
/** `!expr` — negates a primitive predicate. */
export interface UnaryExpr extends WildcardNode {
    nodeType: "UnaryExpr";
    op: UnaryOperator;
    expr: PrimitiveExpr;
}

/** The atoms of the grammar: a parenthesized group or an identifier. */
export type PrimitiveExpr = GroupExpr | Identifier;

/** `( expr )` — grouping for precedence. */
export interface GroupExpr extends WildcardNode {
    nodeType: "GroupExpr";
    expr: TypeExpr;
}
|
||||
|
||||
/**
 * Recursive-descent parser for wildcard declarations. Grammar (informal):
 *
 *   Wildcard      := Identifier ":" TypeExpr [Pluss]
 *   TypeExpr      := UnaryExpr | BinaryExpr
 *   BinaryExpr    := UnaryExpr { ("&&" | "||") UnaryExpr }   // left-assoc
 *   UnaryExpr     := "!" PrimitiveExpr | PrimitiveExpr
 *   PrimitiveExpr := "(" TypeExpr ")" | Identifier
 *
 * The parser peeks one token ahead and only advances after a successful
 * check, so `getCurrentToken()` always refers to the last consumed token.
 *
 * NOTE(review): `TypeExpr`, `BinaryOperator`, `UnaryOperator`,
 * `PrimitiveExpr` and the terminal helpers call `this.peek().tokenKind`
 * without a guard; a truncated token stream makes peek() return undefined
 * and throws a TypeError instead of the formatted error() — confirm inputs
 * are always well-terminated.
 */
export class WildcardParser {
    // Index of the last consumed token; -1 before parsing starts.
    private position = -1;

    constructor(private tokens: WildcardToken[]) {}
    private getCurrentToken() {
        // 1. Return the element of array `tokens` at the current position.
        return this.tokens[this.position];
    }

    private advance(): void {
        // 1. Increment the value of `currentPosition` by 1.
        this.position += 1;
    }

    private peek() {
        // 1. Return the element of array `tokens` at a position immediately after the current position.
        return this.tokens[this.position + 1];
    }

    // Build (but do not throw) a parse error describing the current token.
    private error() {
        return new Error(
            "Parsing failed at position: " +
                this.position +
                ". The erroneous input token is: " +
                this.getCurrentToken().value
        );
    }

    /** Parse the whole token stream as a single Wildcard declaration. */
    parse(): Wildcard {
        return this.Wildcard();
    }

    private Wildcard(): Wildcard {
        let identifier = this.Identifier();
        this.Semicolon();
        let multidenoted = this.TypeExpr();
        let star = this.Pluss();
        return {
            nodeType: "Wildcard",
            identifier,
            expr: multidenoted,
            star,
        };
    }

    // Optional trailing "+" token; its presence sets the wildcard's `star`.
    private Pluss(): boolean {
        if (this.peek() && this.peek().tokenKind === "Pluss") {
            this.advance();
            return true;
        } else {
            return false;
        }
    }

    private TypeExpr(): TypeExpr {
        if (this.peek().tokenKind === "UnaryOperator") {
            return this.UnaryExpr();
        } else {
            return this.BinaryExpr();
        }
    }

    // Left-associative chain: a && b || c parses as ((a && b) || c).
    private BinaryExpr(): BinaryExpr | UnaryExpr | PrimitiveExpr {
        let left: UnaryExpr | BinaryExpr | PrimitiveExpr = this.UnaryExpr();
        while (this.peek() && this.peek().tokenKind === "BinaryOperator") {
            let op = this.BinaryOperator();
            let right = this.UnaryExpr();
            left = {
                nodeType: "BinaryExpr",
                left,
                op,
                right,
            };
        }

        return left;
    }

    private BinaryOperator(): BinaryOperator {
        if (this.peek().tokenKind === "BinaryOperator") {
            this.advance();
            return this.getCurrentToken().value as BinaryOperator;
        } else throw this.error();
    }

    private UnaryExpr(): UnaryExpr | PrimitiveExpr {
        if (this.peek().tokenKind === "UnaryOperator") {
            let UnaryOperator = this.UnaryOperator();
            let expr = this.PrimitiveExpr();
            return {
                nodeType: "UnaryExpr",
                op: UnaryOperator,
                expr,
            };
        } else {
            return this.PrimitiveExpr();
        }
    }

    private UnaryOperator(): UnaryOperator {
        if (this.peek().tokenKind === "UnaryOperator") {
            this.advance();
            return this.getCurrentToken().value as UnaryOperator;
        } else throw this.error();
    }

    private PrimitiveExpr(): PrimitiveExpr {
        if (this.peek().tokenKind === "OpeningParenthesis") {
            return this.GroupExpr();
        } else {
            return this.Identifier();
        }
    }

    private GroupExpr(): GroupExpr {
        this.OpeningParenthesis();
        let expr = this.TypeExpr();
        this.ClosingParenthesis();
        return {
            nodeType: "GroupExpr",
            expr,
        };
    }

    // Terminal helpers: consume exactly one token of the expected kind
    // or throw a formatted parse error.
    private OpeningParenthesis() {
        if (this.peek().tokenKind === "OpeningParenthesis") {
            this.advance();
        } else throw this.error();
    }
    private ClosingParenthesis() {
        if (this.peek().tokenKind === "ClosingParenthesis") {
            this.advance();
        } else throw this.error();
    }

    private Semicolon() {
        if (this.peek().tokenKind === "Semicolon") {
            this.advance();
        } else {
            throw this.error();
        }
    }
    private Identifier(): Identifier {
        if (this.peek().tokenKind === "Identifier") {
            this.advance();
            return {
                nodeType: "Identifier",
                name: this.getCurrentToken().value,
            };
        } else throw this.error();
    }
}
|
||||
|
||||
/**
 * Parse JavaScript with the Babel plugins this project's DSL relies on:
 * the Hack-style pipeline operator (topic token `%`), do-expressions and
 * top-level await. Lenient options allow fragments (bare return, undeclared
 * exports) so template snippets parse as well as whole programs.
 *
 * @param code - JavaScript source text.
 * @returns The Babel File AST.
 */
export function parse_with_plugins(
    code: string
): babelparser.ParseResult<t.File> {
    return babelparser.parse(code, {
        plugins: [
            ["pipelineOperator", { proposal: "hack", topicToken: "%" }],
            "doExpressions",
            "topLevelAwait",
        ],
        allowAwaitOutsideFunction: true,
        allowReturnOutsideFunction: true,
        allowUndeclaredExports: true,
        sourceType: "unambiguous",
    });
}
|
||||
|
||||
// Manual smoke test for parseInternalAplTo (not wired to a test runner;
// enable the call at the bottom to run it). Dumps the parsed wildcards and
// cleaned template for two representative inputs.
function testParser() {
    console.dir(
        parseInternalAplTo(
            "<<someFunctionIdent:Identifier || MemberExpression>>(<<someFunctionParam: Expression || Identifier>>);"
        ),
        { depth: null }
    );

    console.dir(
        parseInternalAplTo("<<SomeIdent: Statement && !ReturnStatement >>"),
        {
            depth: null,
        }
    );
}

//testParser();
|
43
src/parser/preludeBuilder.ts
Normal file
43
src/parser/preludeBuilder.ts
Normal file
|
@ -0,0 +1,43 @@
|
|||
import { Wildcard, WildcardParser, parse_with_plugins } from "./parse";
|
||||
|
||||
import * as t from "@babel/types";
|
||||
import { WildcardTokenizer } from "./wildcardTokenizer";
|
||||
|
||||
export function preludeBuilder(prelude: string) {
|
||||
let parsedPrelude = parse_with_plugins(prelude).program.body;
|
||||
return extractValues(parsedPrelude);
|
||||
}
|
||||
|
||||
function extractValues(types: t.Statement[]): Wildcard[] {
|
||||
let prelude: Wildcard[] = [];
|
||||
for (let stmt of types) {
|
||||
if (stmt.type == "VariableDeclaration") {
|
||||
stmt = <t.VariableDeclaration>stmt;
|
||||
let declaration = stmt.declarations[0];
|
||||
let innerDSLVariableName = (declaration.id as t.Identifier).name;
|
||||
let init = declaration.init;
|
||||
if (init) {
|
||||
if (init.type == "StringLiteral") {
|
||||
init = <t.StringLiteral>init;
|
||||
prelude.push(
|
||||
new WildcardParser(
|
||||
new WildcardTokenizer(
|
||||
innerDSLVariableName + ":" + init
|
||||
).tokenize()
|
||||
).parse()
|
||||
);
|
||||
} else {
|
||||
throw new Error(
|
||||
"Invalid usage of right side declaration in prelude"
|
||||
);
|
||||
}
|
||||
} else {
|
||||
throw new Error("Empty wildcards are not legal");
|
||||
}
|
||||
} else {
|
||||
throw new Error("Usage of non VariableDeclaration in Prelude");
|
||||
}
|
||||
}
|
||||
|
||||
return prelude;
|
||||
}
|
132
src/parser/wildcardTokenizer.ts
Normal file
132
src/parser/wildcardTokenizer.ts
Normal file
|
@ -0,0 +1,132 @@
|
|||
// Kinds of tokens the wildcard tokenizer can emit. "Pluss" is the trailing
// "+" that marks a multi-consuming (starred) wildcard; "Semicolon" is the
// ":" separating identifier from type expression.
type TokenKind =
    | "BinaryOperator"
    | "UnaryOperator"
    | "Identifier"
    | "OpeningParenthesis"
    | "ClosingParenthesis"
    | "Pluss"
    | "Semicolon";

/** One lexical token: its kind plus the exact source text it covers. */
export interface WildcardToken {
    tokenKind: TokenKind;
    value: string;
}
|
||||
|
||||
/**
 * Hand-written tokenizer for wildcard declarations
 * (e.g. `name: (Statement && !ReturnStatement) +`).
 *
 * Recognized tokens: "(", ")", "||", "&&", "!", "+", ":" and identifiers
 * made of ASCII letters and "_". Spaces are skipped; anything else throws.
 *
 * NOTE(review): there is no case for "*", yet the commented-out sibling
 * tests feed inputs ending in ")*" — those would hit the default branch and
 * throw. Confirm whether "*" was meant to be an alias for "+".
 * NOTE(review): isAlpha excludes digits, so identifiers with digits
 * (e.g. `expr2`) are rejected — confirm that is intended.
 */
export class WildcardTokenizer {
    private tokens: WildcardToken[] = [];
    private current = -1; // Have to start at -1 because first iteration advances
    private source: string[];
    constructor(source: string) {
        this.source = source.split("");
    }

    /** Scan the whole source and return the token list. */
    tokenize(): WildcardToken[] {
        while (this.current < this.source.length - 1) {
            this.scanToken();
        }
        return this.tokens;
    }

    // Look at the next character without consuming it.
    private peek(): string | undefined {
        return this.source[this.current + 1];
    }
    private getCurrent() {
        return this.source[this.current];
    }
    private advance() {
        this.current += 1;
    }

    // Append a token to the output stream.
    private consumeToken(tokenKind: TokenKind, value: string) {
        this.tokens.push({ tokenKind, value });
    }

    // Consume one character (or two for "||"/"&&") and emit its token.
    private scanToken() {
        this.advance();
        let char = this.getCurrent();
        switch (char) {
            case "(": {
                this.consumeToken("OpeningParenthesis", char);
                break;
            }
            case ")": {
                this.consumeToken("ClosingParenthesis", char);
                break;
            }
            case "|": {
                // Only the two-character form "||" is legal.
                if (this.peek() === "|") {
                    this.advance();
                    this.consumeToken("BinaryOperator", "||");
                } else {
                    throw new Error(
                        "Invalid token given to tokenizer: " + char
                    );
                }
                break;
            }
            case "!": {
                this.consumeToken("UnaryOperator", char);
                break;
            }
            case "&": {
                // Only the two-character form "&&" is legal.
                if (this.peek() === "&") {
                    this.advance();
                    this.consumeToken("BinaryOperator", "&&");
                } else {
                    throw new Error(
                        "Invalid token given to tokenizer: " + char
                    );
                }
                break;
            }
            case "+": {
                this.consumeToken("Pluss", char);
                break;
            }
            case ":": {
                this.consumeToken("Semicolon", char);
                break;
            }
            case " ":
                // Whitespace separates tokens but produces none.
                break;
            default:
                if (this.isAlpha(char)) {
                    this.consumeAlpha();
                    break;
                } else {
                    throw new Error("Invalid token given: " + char);
                }
        }
    }
    // Greedily consume a run of identifier characters starting at current.
    private consumeAlpha() {
        let word = "";

        while (true) {
            word += this.getCurrent();

            let next = this.peek();
            if (next && this.isAlpha(next)) {
                this.advance();
            } else {
                break;
            }
        }

        this.consumeToken("Identifier", word);
    }
    // True for ASCII letters and underscore (digits are NOT identifier chars).
    private isAlpha(val: string): boolean {
        let alphabet = new Set(
            "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_".split("")
        );
        return alphabet.has(val);
    }
}
|
||||
|
||||
// Manual smoke test for the tokenizer (enable the call below to run).
// NOTE(review): the input ends in ")*" but the tokenizer has no "*" case
// (only "+" → Pluss), so this would throw — confirm the intended suffix.
function testWildcardTokenizer() {
    let tokenized = new WildcardTokenizer(
        "aiaiai: ((LOL||!Smack)&&SomethingElse)*"
    ).tokenize();

    console.log(tokenized);
}
//testWildcardTokenizer();
|
|
@ -1,76 +0,0 @@
|
|||
import { Statement, VariableDeclarator } from "@swc/types";
|
||||
import { MatchStatement } from "../types";
|
||||
|
||||
export const from: MatchStatement = {
|
||||
type: "VariableDeclaration",
|
||||
span: {
|
||||
start: 1,
|
||||
end: 13,
|
||||
ctxt: 0,
|
||||
},
|
||||
kind: "var",
|
||||
declare: false,
|
||||
declarations: [
|
||||
{
|
||||
type: "VariableDeclarator",
|
||||
span: {
|
||||
start: 5,
|
||||
end: 12,
|
||||
ctxt: 0,
|
||||
},
|
||||
id: {
|
||||
type: "Identifier",
|
||||
span: {
|
||||
start: 5,
|
||||
end: 6,
|
||||
ctxt: 2,
|
||||
},
|
||||
value: "a",
|
||||
optional: false,
|
||||
},
|
||||
init: {
|
||||
type: "NumericLiteral",
|
||||
span: {
|
||||
start: 9,
|
||||
end: 12,
|
||||
ctxt: 0,
|
||||
},
|
||||
value: 100,
|
||||
raw: "100",
|
||||
},
|
||||
definite: false,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
export const to: VariableDeclarator[] = [
|
||||
{
|
||||
type: "VariableDeclarator",
|
||||
span: {
|
||||
start: 5,
|
||||
end: 12,
|
||||
ctxt: 0,
|
||||
},
|
||||
id: {
|
||||
type: "Identifier",
|
||||
span: {
|
||||
start: 5,
|
||||
end: 6,
|
||||
ctxt: 2,
|
||||
},
|
||||
value: "a",
|
||||
optional: false,
|
||||
},
|
||||
init: {
|
||||
type: "NumericLiteral",
|
||||
span: {
|
||||
start: 9,
|
||||
end: 12,
|
||||
ctxt: 0,
|
||||
},
|
||||
value: 100,
|
||||
raw: "100",
|
||||
},
|
||||
definite: false,
|
||||
},
|
||||
];
|
9
src/test/test_outputs/awaitToPromise_output.js
Normal file
9
src/test/test_outputs/awaitToPromise_output.js
Normal file
|
@ -0,0 +1,9 @@
|
|||
async function something() {
|
||||
let a = 100;
|
||||
a *= 100000;
|
||||
return fetch("https://uib.no").then(async uib => {
|
||||
a += 100000;
|
||||
a -= 1000;
|
||||
return [a, uib];
|
||||
});
|
||||
}
|
12
src/test/test_outputs/do_output.js
Normal file
12
src/test/test_outputs/do_output.js
Normal file
|
@ -0,0 +1,12 @@
|
|||
let aaaa = do {
|
||||
let g = 100;
|
||||
let ff = 10;
|
||||
let ggg = a(b);
|
||||
100
|
||||
};
|
||||
var bbaaa = do {
|
||||
let lllll = 1 + 1;
|
||||
100 + 100;
|
||||
const aaaaa = aaaa(bb);
|
||||
lllll
|
||||
};
|
4
src/test/test_outputs/pipeline_output.js
Normal file
4
src/test/test_outputs/pipeline_output.js
Normal file
|
@ -0,0 +1,4 @@
|
|||
a |> w(%) |> w(%) |> w(%) |> w(%) |> w(%) |> w(%) |> w(%) |> w(%) |> w(%) |> w(%);
|
||||
a |> b(%, a |> b(%, a |> b(%, a |> b(%, a |> b(%, a |> b(%, a |> b(%, b)))))));
|
||||
a |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a) |> b(%, a);
|
||||
b(b(b(b(a, a, a), a, a), a, a), a, a);
|
44
src/test/test_transform.test.ts
Normal file
44
src/test/test_transform.test.ts
Normal file
|
@ -0,0 +1,44 @@
|
|||
import { expect, test } from "bun:test";
|
||||
import { TransformRecipe, transform } from "../transform/transform";
|
||||
import { parseJSTQL } from "../langium/langiumRunner";
|
||||
|
||||
async function runTest(inputJS: string, inputJSTQL: string): Promise<string> {
|
||||
//transform(selfHostedTransformExampleMultiStmt, codeFromFile);
|
||||
const file = Bun.file(inputJS);
|
||||
const codeFromFile = await file.text();
|
||||
|
||||
const test_file = Bun.file(inputJSTQL);
|
||||
const test_JSTQL = await test_file.text();
|
||||
let proposals = await parseJSTQL(test_JSTQL);
|
||||
|
||||
let code = transform(proposals[0].cases, codeFromFile);
|
||||
return code;
|
||||
}
|
||||
// Pipeline-operator proposal: expects 29 matches to be rewritten.
let pipelineRes = await runTest(
    "test_files/pipeline_test.js",
    "dsl_files/pipeline.jstql"
);
let pipelineResFile = await Bun.file(
    "src/test/test_outputs/pipeline_output.js"
).text();
test("Test code: pipeline", () => {
    expect(pipelineRes).toEqual([pipelineResFile, 29]);
});
// Do-expression proposal: expects 2 matches.
let doRes = await runTest("test_files/do_test.js", "dsl_files/do.jstql");

let doResFile = await Bun.file("src/test/test_outputs/do_output.js").text();
test("Test code: do", () => {
    expect(doRes).toEqual([doResFile, 2]);
});

// Await-to-promise proposal: expects a single match.
let awaitToPromise = await runTest(
    "test_files/awaitToPromise.js",
    "dsl_files/awaitToPromise.jstql"
);

let awaitToPromiseOutput = await Bun.file(
    "src/test/test_outputs/awaitToPromise_output.js"
).text();
test("Test code: await to promise", () => {
    expect(awaitToPromise).toEqual([awaitToPromiseOutput, 1]);
});
|
|
@ -1 +0,0 @@
|
|||
var a = 100 + 100;
|
101
src/transform/transform.ts
Normal file
101
src/transform/transform.ts
Normal file
|
@ -0,0 +1,101 @@
|
|||
import traverse from "@babel/traverse";
|
||||
import * as t from "@babel/types";
|
||||
import generate from "@babel/generator";
|
||||
import {
|
||||
Wildcard,
|
||||
parseInternalAplTo,
|
||||
parseInternalTraTo,
|
||||
parse_with_plugins,
|
||||
} from "../parser/parse";
|
||||
import {
|
||||
TreeNode,
|
||||
makeTree,
|
||||
showTree,
|
||||
showTreePaired,
|
||||
} from "../data_structures/tree";
|
||||
import { runMatch } from "../matcher/matcher";
|
||||
import { transformMatch, transformer } from "./transformMatch";
|
||||
import { preludeBuilder } from "../parser/preludeBuilder";
|
||||
import * as babelparser from "@babel/parser";
|
||||
/** A named proposal: the ordered list of transformation cases to apply. */
export interface Proposal {
    cases: TransformRecipe[];
}

/** One transformation case: template to match and its replacement, as JSTQL source. */
export interface TransformRecipe {
    applicableTo: string;
    transformTo: string;
}
/** Self-hosted variant: wildcards arrive in a separate `prelude` declaration block. */
export interface SelfHostedRecipe extends TransformRecipe {
    prelude: string;
}
|
||||
export function transform(
|
||||
recipes: TransformRecipe[],
|
||||
code: string
|
||||
): [string, number] {
|
||||
let codeAST: t.Node = parse_with_plugins(code);
|
||||
let amount = 0;
|
||||
for (let recipe of recipes) {
|
||||
if ((<SelfHostedRecipe>recipe).prelude !== undefined) {
|
||||
// We are using the self hosted version
|
||||
let temp = transformSelfHosted(
|
||||
{
|
||||
applicableTo: recipe.applicableTo,
|
||||
transformTo: recipe.transformTo,
|
||||
},
|
||||
preludeBuilder((recipe as SelfHostedRecipe).prelude),
|
||||
codeAST
|
||||
);
|
||||
codeAST = temp[0];
|
||||
amount += temp[1];
|
||||
} else {
|
||||
// We are using JSTQL
|
||||
// We have to parse JSTQL to the self hosted version
|
||||
|
||||
let { cleanedJS: applicableTo, prelude } = parseInternalAplTo(
|
||||
recipe.applicableTo
|
||||
);
|
||||
let transformTo = parseInternalTraTo(recipe.transformTo);
|
||||
|
||||
let temp = transformSelfHosted(
|
||||
{ applicableTo, transformTo },
|
||||
prelude,
|
||||
codeAST
|
||||
);
|
||||
codeAST = temp[0];
|
||||
amount += temp[1];
|
||||
}
|
||||
}
|
||||
|
||||
let output = generate(codeAST, { topicToken: "%" }).code;
|
||||
//showTree(transformToTree);
|
||||
return [output, amount];
|
||||
}
|
||||
|
||||
function transformSelfHosted(
|
||||
recipe: TransformRecipe,
|
||||
internals: Wildcard[],
|
||||
codeAST: t.Node
|
||||
): [t.Node, number] {
|
||||
let codeTree = makeTree(codeAST as babelparser.ParseResult<t.File>);
|
||||
let applicabelToAST = parse_with_plugins(recipe.applicableTo);
|
||||
|
||||
let applicableToTree = makeTree(applicabelToAST);
|
||||
let transformTo = parse_with_plugins(recipe.transformTo);
|
||||
let transformToTree = makeTree(transformTo);
|
||||
|
||||
if (
|
||||
codeTree == undefined ||
|
||||
applicableToTree == undefined ||
|
||||
transformToTree == undefined
|
||||
) {
|
||||
throw new Error("This no worky LOL");
|
||||
}
|
||||
let matches = runMatch(codeTree, applicableToTree, internals);
|
||||
//showTreePaired(matches[0].statements[0]);
|
||||
//console.log("We found", matches.length, "matches");
|
||||
|
||||
let outputAST = transformer(matches, transformToTree, codeAST, transformTo);
|
||||
|
||||
//console.log("Finished transforming");
|
||||
return [outputAST, matches.length];
|
||||
}
|
145
src/transform/transformMatch.ts
Normal file
145
src/transform/transformMatch.ts
Normal file
|
@ -0,0 +1,145 @@
|
|||
import * as t from "@babel/types";
|
||||
|
||||
import * as babelparser from "@babel/parser";
|
||||
|
||||
import {
|
||||
TreeNode,
|
||||
makeTree,
|
||||
showTree,
|
||||
showTreePaired,
|
||||
} from "../data_structures/tree";
|
||||
import { Match, MatchedTreeNode, PairedNodes } from "../matcher/matcher";
|
||||
import traverse from "@babel/traverse";
|
||||
import generate from "@babel/generator";
|
||||
import { TransformRecipe } from "./transform";
|
||||
import { Wildcard } from "../parser/parse";
|
||||
|
||||
/**
 * Rewrites every match found in the user's program.
 *
 * Pass 1: for each match (processed in reverse so earlier replacements do not
 * invalidate later ones), clone the `transformTo` AST, substitute the
 * wildcards captured by the match, and key the result by the first code node
 * of the match.
 *
 * Pass 2: traverse the user's AST; when a node keyed in pass 1 is reached,
 * splice the substituted `transformTo` body in its place and remove the
 * sibling statements that the match also consumed.
 *
 * @param matches     - matches produced by the matcher
 * @param transformTo - tree form of the recipe's transformTo (unused here;
 *                      kept for interface stability)
 * @param codeAST     - the user's program AST (mutated in place)
 * @param traToAST    - parsed transformTo, cloned per match
 * @returns the mutated `codeAST`
 */
export function transformer(
    matches: Match[],
    transformTo: TreeNode<t.Node>,
    codeAST: t.Node,
    traToAST: t.File
): t.Node {
    // First matched code node -> [substituted transformTo, its match].
    let transformedTransformTo: Map<t.Node, [t.File, Match]> = new Map();

    for (let match of matches.reverse()) {
        try {
            // Clone so each match gets its own copy to substitute into.
            let traToWithWildcards = structuredClone(traToAST);
            let wildcardMatches = extractWildcardPairs(match);

            transformedTransformTo.set(
                match.statements[0].element.codeNode[0],
                [
                    transformMatchFaster(wildcardMatches, traToWithWildcards),
                    match,
                ]
            );
        } catch (e) {
            // NOTE(review): failures (e.g. duplicate wildcards) are logged
            // and the match is skipped — presumably best-effort by design;
            // confirm this is intended rather than a swallowed error.
            console.log(e);
        }
    }

    traverse(codeAST, {
        enter(path) {
            // Program/File nodes can never be match anchors; skip them.
            if (!(path.node.type === "Program" || path.node.type === "File")) {
                if (transformedTransformTo.has(path.node)) {
                    let [traToWithWildcards, match] =
                        transformedTransformTo.get(path.node) as [
                            t.File,
                            Match
                        ];
                    // Splice the substituted transformTo body over the anchor.
                    path.replaceWithMultiple(traToWithWildcards.program.body);
                    let siblings = path.getAllNextSiblings();

                    // For multi line applicable to
                    // (the match consumed statements.length statements; the
                    // anchor replaced one, so drop the remaining siblings).
                    for (let i = 0; i < match.statements.length - 1; i++) {
                        siblings[i].remove();
                    }

                    // For when we have matched with +
                    // (each statement may have captured several code nodes;
                    // remove any of them still present among the siblings).
                    for (let matchStmt of match.statements) {
                        for (let stmtMatchedWithPlus of matchStmt.element
                            .codeNode) {
                            let siblingnodes = siblings.map((a) => a.node);
                            if (siblingnodes.includes(stmtMatchedWithPlus)) {
                                let index =
                                    siblingnodes.indexOf(stmtMatchedWithPlus);
                                siblings[index].remove();
                            }
                        }
                    }
                }
            }
        },
    });
    return codeAST;
}
|
||||
|
||||
function transformMatchFaster(
|
||||
wildcardMatches: Map<string, t.Node[]>,
|
||||
transformTo: t.File
|
||||
): t.File {
|
||||
traverse(transformTo, {
|
||||
Identifier: (path) => {
|
||||
if (wildcardMatches.has(path.node.name)) {
|
||||
let toReplaceWith = wildcardMatches.get(path.node.name);
|
||||
if (toReplaceWith) {
|
||||
path.replaceWithMultiple(toReplaceWith);
|
||||
}
|
||||
}
|
||||
},
|
||||
ExpressionStatement: (path) => {
|
||||
if (path.node.expression.type === "Identifier") {
|
||||
let name = path.node.expression.name;
|
||||
if (wildcardMatches.has(name)) {
|
||||
let toReplaceWith = wildcardMatches.get(name);
|
||||
if (toReplaceWith) {
|
||||
path.replaceWithMultiple(toReplaceWith);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
return transformTo;
|
||||
}
|
||||
|
||||
function extractWildcardPairs(match: Match): Map<string, t.Node[]> {
|
||||
let map: Map<string, t.Node[]> = new Map();
|
||||
|
||||
function recursiveSearch(node: TreeNode<PairedNodes>) {
|
||||
let name: null | string = null;
|
||||
if (node.element.aplToNode.type === "Identifier") {
|
||||
name = node.element.aplToNode.name;
|
||||
} else if (
|
||||
node.element.aplToNode.type === "ExpressionStatement" &&
|
||||
node.element.aplToNode.expression.type === "Identifier"
|
||||
) {
|
||||
name = node.element.aplToNode.expression.name;
|
||||
}
|
||||
|
||||
if (name) {
|
||||
if (map.has(name)) {
|
||||
throw new Error("Wildcard encountered twice!");
|
||||
}
|
||||
|
||||
map.set(name, node.element.codeNode);
|
||||
}
|
||||
|
||||
for (let child of node.children) {
|
||||
recursiveSearch(child);
|
||||
}
|
||||
}
|
||||
for (let stmt of match.statements) {
|
||||
recursiveSearch(stmt);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
function washName(name: string): string {
|
||||
return name;
|
||||
if (name.startsWith("_-_")) {
|
||||
return name.slice(3);
|
||||
}
|
||||
return name;
|
||||
}
|
105
src/types.ts
105
src/types.ts
|
@ -1,105 +0,0 @@
|
|||
import {
|
||||
BinaryExpression,
|
||||
BlockStatement,
|
||||
BreakStatement,
|
||||
ClassDeclaration,
|
||||
ContinueStatement,
|
||||
DebuggerStatement,
|
||||
Declaration,
|
||||
DoWhileStatement,
|
||||
EmptyStatement,
|
||||
Expression,
|
||||
ExpressionStatement,
|
||||
ForInStatement,
|
||||
ForOfStatement,
|
||||
ForStatement,
|
||||
FunctionDeclaration,
|
||||
IfStatement,
|
||||
LabeledStatement,
|
||||
ReturnStatement,
|
||||
Script,
|
||||
SwitchStatement,
|
||||
ThrowStatement,
|
||||
TryStatement,
|
||||
VariableDeclaration,
|
||||
WhileStatement,
|
||||
WithStatement,
|
||||
} from "@swc/core";
|
||||
|
||||
// Union of every statement kind the matcher understands, mirroring SWC's
// statement taxonomy one-to-one.
// NOTE(review): "MatchBlockSatement" (sic) and the lowercase
// "matchIfStatement" are inconsistently named, but both are part of the
// exported surface, so renaming here would break importers.
export type MatchStatement =
    | MatchBlockSatement
    | MatchEmptyStatement
    | MatchDebuggerStatement
    | MatchWithStatement
    | MatchReturnStatement
    | MatchLabeledStatement
    | MatchBreakStatement
    | MatchContinueStatement
    | matchIfStatement
    | MatchSwitchStatement
    | MatchThrowStatement
    | MatchTryStatement
    | MatchWhileStatement
    | MatchDoWhileStatement
    | MatchForInStatement
    | MatchForStatement
    | MatchForOfStatement
    | MatchDeclaration
    | MatchExpressionStatement;

// Declaration subset of MatchStatement (class / function / variable).
type MatchDeclaration =
    | MatchClassDeclaration
    | MatchFunctionDeclaration
    | MatchVariableDeclaration;

// Kinds of transformation markers; only the catch-all exists so far.
export enum Transformation {
    ANYTHING,
}

// One marker interface per SWC node type. They add no members today;
// they exist so matcher-specific fields can be added per node kind later.
export interface MatchScript extends Script {}

export interface MatchBlockSatement extends BlockStatement {}

export interface MatchEmptyStatement extends EmptyStatement {}

export interface MatchDebuggerStatement extends DebuggerStatement {}

export interface MatchWithStatement extends WithStatement {}

export interface MatchReturnStatement extends ReturnStatement {}

export interface MatchLabeledStatement extends LabeledStatement {}

export interface MatchBreakStatement extends BreakStatement {}

export interface MatchContinueStatement extends ContinueStatement {}

export interface MatchSwitchStatement extends SwitchStatement {}

export interface MatchThrowStatement extends ThrowStatement {}

export interface MatchTryStatement extends TryStatement {}

export interface MatchWhileStatement extends WhileStatement {}

export interface MatchDoWhileStatement extends DoWhileStatement {}

export interface MatchForStatement extends ForStatement {}

export interface MatchForInStatement extends ForInStatement {}

export interface MatchForOfStatement extends ForOfStatement {}

export interface MatchExpressionStatement extends ExpressionStatement {}

export interface matchIfStatement extends IfStatement {}

// NOTE(review): extends BlockStatement despite the "BinaryExpression"
// name — presumably intentional for block-wildcard matching; confirm.
export interface matchBinaryExpression extends BlockStatement {
    // Marker describing how the wrapped statements may be transformed.
    __stmts: Transformation;
}

export interface MatchClassDeclaration extends ClassDeclaration {}

export interface MatchFunctionDeclaration extends FunctionDeclaration {}

export interface MatchVariableDeclaration extends VariableDeclaration {}
|
8
test_files/awaitToPromise.js
Normal file
8
test_files/awaitToPromise.js
Normal file
|
@ -0,0 +1,8 @@
|
|||
// Fixture: input for the await -> promise-chain transformation.
// NOTE(review): the exact statement shapes here are what the recipes match
// against, so the code is deliberately left untouched.
async function something() {
    let a = 100;
    a *= 100000;
    let uib = await fetch("https://uib.no");
    a += 100000;
    a -= 1000;
    return [a, uib];
}
|
13
test_files/do_test.js
Normal file
13
test_files/do_test.js
Normal file
|
@ -0,0 +1,13 @@
|
|||
// Fixture: multi-statement function bodies (arrow function and IIFE) used as
// input for block/do-expression style matching. Statement shapes are the
// point of the fixture — do not "clean up" this code.
let aaaa = () => {
    let g = 100;
    let ff = 10;
    let ggg = a(b);
    return 100;
};

// Same idea wrapped in an immediately-invoked function expression.
var bbaaa = (function () {
    let lllll = 1 + 1;
    100 + 100;
    const aaaaa = aaaa(bb);
    return lllll;
})();
|
7
test_files/multi_stmt_test.js
Normal file
7
test_files/multi_stmt_test.js
Normal file
|
@ -0,0 +1,7 @@
|
|||
// Fixture: the same two-statement sequence at top level and inside a closure,
// exercising multi-statement `applicable to` matching at different scopes.
let a = LOOOOOOOOL();
let b = (999 * 128) / 12;

const haha = () => {
    let a = LOOOOOOOOL();
    let b = (999 * 128) / 12;
};
|
7
test_files/pipeline_test.js
Normal file
7
test_files/pipeline_test.js
Normal file
|
@ -0,0 +1,7 @@
|
|||
// Fixture: deeply nested call chains used as input for the pipeline
// transformation (Hack-style `%` topic token). Nesting depth and argument
// positions are the material being matched.
w(w(w(w(w(w(w(w(w(w(a))))))))));

b(a, b(a, b(a, b(a, b(a, b(a, b(a, b)))))));

b(b(b(b(b(b(b(b(b(b(b(b(a, a), a), a), a), a), a), a), a), a), a), a), a);

b(b(b(b(a, a, a), a, a), a, a), a, a);
|
8
test_files/single_stmt.js
Normal file
8
test_files/single_stmt.js
Normal file
|
@ -0,0 +1,8 @@
|
|||
// Fixture: assorted single statements for single-statement recipe matching.
let something = 1 + 1;
let yikers = hahahah;

let lol = () => 100 + 100;

function haha() {
    let fhdsjkfhdsjkfhds = fjhdkslfjhdsklfjdskl;
}
|
4
test_files/star_test.js
Normal file
4
test_files/star_test.js
Normal file
|
@ -0,0 +1,4 @@
|
|||
// Fixture: minimal function body used as input for `*` (zero-or-more
// statements) wildcard matching.
let x = () => {
    let b = 0;
    return b;
};
|
108
test_files/test.js
Normal file
108
test_files/test.js
Normal file
|
@ -0,0 +1,108 @@
|
|||
// Fixture file (browser code): decodes a base64/zlib-compressed save string
// from the "input" element and hands the resulting JSON to convertToDesktop.
function parse() {
    const input = document.getElementById("input").value;
    // Skip the 32-character hash prefix in front of the payload.
    const data = input.slice(32);

    const compressedData = decode_base64(data);
    const uncompressed = pako.inflate(compressedData, { to: "string" });
    const json = JSON.parse(uncompressed);
    console.log(json);

    convertToDesktop(json);
}
|
||||
|
||||
// Rewrites a parsed save object into the desktop format, re-encodes it
// (zlib + base64, prefixed with a fixed hash), and writes the result to the
// "output_output" element.
function convertToDesktop(json) {
    // Fields the desktop format requires that the source may lack.
    const newValues = {
        crb: false,
        newClanRaidClassId: 0,
        newClanRaidClassLevel: 0,
        pendingImmortalSouls: 0,
        pendingRaidRubies: 0,
        immortalSouls: 0,
        lastPurchaseTime: 0,
        lastRaidAttemptTimestamp: 0,
        lastRaidRewardCheckTimestamp: 0,
        shouldShowHZERoster: false,
        lastBonusRewardCheckTimestamp: 0,
    };

    // Fields whose values are converted rather than copied.
    const mappedValues = {
        rubies: Math.round(json.rubies / 10),
    };

    // Fields forced to PC-specific values.
    const pcSpecificValues = {
        readPatchNumber: "1.0e12",
        saveOrigin: "pc",
    };

    const hash = "7a990d405d2c6fb93aa8fbb0ec1a3b23";
    // Later spreads win: defaults < original save < mapped < pc-specific.
    const newData = {
        ...newValues,
        ...json,
        ...mappedValues,
        ...pcSpecificValues,
    };
    const compressed = pako.deflate(JSON.stringify(newData), { to: "string" });
    const base64 = btoa(compressed);

    const finalSaveString = hash + base64;
    document.getElementById("output_output").innerText = finalSaveString;
    showOutput();
}
|
||||
|
||||
// Reveals the output panel.
function showOutput() {
    document.getElementById("outputs").style.visibility = "visible";
}
|
||||
|
||||
// Copies the generated save string to the clipboard and briefly shows a
// success message (hidden again after 4 s).
function copyOutput() {
    const output = document.getElementById("output_output");
    // Temporarily enable the field so it can be focused and selected.
    output.disabled = false;
    output.focus();
    output.select();
    document.execCommand("copy");
    output.disabled = true;
    const successElement = document.getElementById("copy_success_msg");
    successElement.style.visibility = "visible";
    setTimeout(() => (successElement.style.visibility = "hidden"), 4000);
}
|
||||
|
||||
// Hand-rolled base64 decoder returning the decoded bytes as a binary string.
function decode_base64(s) {
    let e = {}, // alphabet character -> 6-bit value
        i,
        k,
        v = [], // the 64-character base64 alphabet, built below
        r = "", // accumulated output
        w = String.fromCharCode;
    // Char-code ranges for A-Z, a-z, 0-9, '+', '/' (upper bound exclusive).
    let n = [
        [65, 91],
        [97, 123],
        [48, 58],
        [43, 44],
        [47, 48],
    ];

    // NOTE(review): `z` is never declared, so this creates an implicit
    // global (and would throw in strict mode). Left as-is because this
    // file is transformation-fixture input, not shipped code.
    for (z in n) {
        for (i = n[z][0]; i < n[z][1]; i++) {
            v.push(w(i));
        }
    }
    for (i = 0; i < 64; i++) {
        e[v[i]] = i;
    }

    // Decode in 72-character chunks, packing 6 bits per input character
    // and emitting a byte whenever 8 bits have accumulated.
    for (i = 0; i < s.length; i += 72) {
        let b = 0, // bit accumulator
            c,
            x,
            l = 0, // bits currently held in the accumulator
            o = s.substring(i, i + 72);
        for (x = 0; x < o.length; x++) {
            c = e[o.charAt(x)];
            b = (b << 6) + c;
            l += 6;
            while (l >= 8) {
                r += w((b >>> (l -= 8)) % 256);
            }
        }
    }
    return r;
}
|
208
test_files/test2.js
Normal file
208
test_files/test2.js
Normal file
|
@ -0,0 +1,208 @@
|
|||
// Fixture file: three.js Sprite source used as large-scale transformer input.
// Quad geometry shared by every Sprite instance; created lazily in the
// constructor on first use.
let _geometry;

// Module-level scratch objects, reused across raycast calls to avoid
// per-call allocations.
const _intersectPoint = /*@__PURE__*/ new Vector3();
const _worldScale = /*@__PURE__*/ new Vector3();
const _mvPosition = /*@__PURE__*/ new Vector3();

const _alignedPosition = /*@__PURE__*/ new Vector2();
const _rotatedPosition = /*@__PURE__*/ new Vector2();
const _viewWorldMatrix = /*@__PURE__*/ new Matrix4();

// Quad corners in camera space.
const _vA = /*@__PURE__*/ new Vector3();
const _vB = /*@__PURE__*/ new Vector3();
const _vC = /*@__PURE__*/ new Vector3();

// Matching UV coordinates.
const _uvA = /*@__PURE__*/ new Vector2();
const _uvB = /*@__PURE__*/ new Vector2();
const _uvC = /*@__PURE__*/ new Vector2();
|
||||
|
||||
// A camera-facing quad with a SpriteMaterial (three.js source, kept verbatim
// as fixture input for the transformer).
class Sprite extends Object3D {
    constructor(material = new SpriteMaterial()) {
        super();

        this.isSprite = true;

        this.type = "Sprite";

        // Lazily build the shared unit-quad geometry on first construction.
        if (_geometry === undefined) {
            _geometry = new BufferGeometry();

            // Interleaved layout: x, y, z, u, v per corner (stride 5).
            const float32Array = new Float32Array([
                -0.5, -0.5, 0, 0, 0, 0.5, -0.5, 0, 1, 0, 0.5, 0.5, 0, 1, 1,
                -0.5, 0.5, 0, 0, 1,
            ]);

            const interleavedBuffer = new InterleavedBuffer(float32Array, 5);

            // Two triangles covering the quad.
            _geometry.setIndex([0, 1, 2, 0, 2, 3]);
            _geometry.setAttribute(
                "position",
                new InterleavedBufferAttribute(interleavedBuffer, 3, 0, false)
            );
            _geometry.setAttribute(
                "uv",
                new InterleavedBufferAttribute(interleavedBuffer, 2, 3, false)
            );
        }

        this.geometry = _geometry;
        this.material = material;

        // Pivot of the sprite in quad space; (0.5, 0.5) is the center.
        this.center = new Vector2(0.5, 0.5);
    }

    // Intersects a ray with the billboarded quad; pushes a hit record onto
    // `intersects` when the ray hits within [near, far].
    raycast(raycaster, intersects) {
        if (raycaster.camera === null) {
            console.error(
                'THREE.Sprite: "Raycaster.camera" needs to be set in order to raycast against sprites.'
            );
        }

        _worldScale.setFromMatrixScale(this.matrixWorld);

        _viewWorldMatrix.copy(raycaster.camera.matrixWorld);
        this.modelViewMatrix.multiplyMatrices(
            raycaster.camera.matrixWorldInverse,
            this.matrixWorld
        );

        _mvPosition.setFromMatrixPosition(this.modelViewMatrix);

        // Without size attenuation, scale by depth so the sprite keeps a
        // constant screen size under a perspective camera.
        if (
            raycaster.camera.isPerspectiveCamera &&
            this.material.sizeAttenuation === false
        ) {
            _worldScale.multiplyScalar(-_mvPosition.z);
        }

        const rotation = this.material.rotation;
        let sin, cos;

        // sin/cos stay undefined for rotation 0; transformVertex checks.
        if (rotation !== 0) {
            cos = Math.cos(rotation);
            sin = Math.sin(rotation);
        }

        const center = this.center;

        // Place three of the four quad corners in world space.
        transformVertex(
            _vA.set(-0.5, -0.5, 0),
            _mvPosition,
            center,
            _worldScale,
            sin,
            cos
        );
        transformVertex(
            _vB.set(0.5, -0.5, 0),
            _mvPosition,
            center,
            _worldScale,
            sin,
            cos
        );
        transformVertex(
            _vC.set(0.5, 0.5, 0),
            _mvPosition,
            center,
            _worldScale,
            sin,
            cos
        );

        _uvA.set(0, 0);
        _uvB.set(1, 0);
        _uvC.set(1, 1);

        // check first triangle
        let intersect = raycaster.ray.intersectTriangle(
            _vA,
            _vB,
            _vC,
            false,
            _intersectPoint
        );

        if (intersect === null) {
            // check second triangle
            transformVertex(
                _vB.set(-0.5, 0.5, 0),
                _mvPosition,
                center,
                _worldScale,
                sin,
                cos
            );
            _uvB.set(0, 1);

            intersect = raycaster.ray.intersectTriangle(
                _vA,
                _vC,
                _vB,
                false,
                _intersectPoint
            );
            if (intersect === null) {
                return;
            }
        }

        const distance = raycaster.ray.origin.distanceTo(_intersectPoint);

        if (distance < raycaster.near || distance > raycaster.far) return;

        intersects.push({
            distance: distance,
            point: _intersectPoint.clone(),
            // Interpolate UVs of the hit point across the hit triangle.
            uv: Triangle.getInterpolation(
                _intersectPoint,
                _vA,
                _vB,
                _vC,
                _uvA,
                _uvB,
                _uvC,
                new Vector2()
            ),
            face: null,
            object: this,
        });
    }

    // Copies center and material from `source` (material is shared, not
    // cloned).
    copy(source, recursive) {
        super.copy(source, recursive);

        if (source.center !== undefined) this.center.copy(source.center);

        this.material = source.material;

        return this;
    }
}
|
||||
|
||||
// Positions one quad corner in world space: offset from the pivot, scale,
// optionally rotate in the camera plane, translate to the sprite's
// camera-space position, then apply the camera's world matrix.
function transformVertex(vertexPosition, mvPosition, center, scale, sin, cos) {
    // compute position in camera space
    _alignedPosition
        .subVectors(vertexPosition, center)
        .addScalar(0.5)
        .multiply(scale);

    // to check if rotation is not zero
    if (sin !== undefined) {
        _rotatedPosition.x =
            cos * _alignedPosition.x - sin * _alignedPosition.y;
        _rotatedPosition.y =
            sin * _alignedPosition.x + cos * _alignedPosition.y;
    } else {
        _rotatedPosition.copy(_alignedPosition);
    }

    vertexPosition.copy(mvPosition);
    vertexPosition.x += _rotatedPosition.x;
    vertexPosition.y += _rotatedPosition.y;

    // transform to world space
    vertexPosition.applyMatrix4(_viewWorldMatrix);
}

export { Sprite };
|
|
@ -1,13 +1,15 @@
|
|||
{
    "compilerOptions": {
        "rootDir": "src",
        "outDir": "dist",
        "strict": true,
        "target": "es2017",
        "module": "commonjs",
        "sourceMap": true,
        "esModuleInterop": true,
        "moduleResolution": "node",
        "types": ["bun-types"]
    },
    "include": ["src"],
    "exclude": ["babel"]
}
|
||||
|
|
Loading…
Reference in a new issue