This repository was archived by the owner on May 30, 2023. It is now read-only.

Commit 211c3b8

Zombi authored and committed
Upgrade parser to TypeScript; running a file now produces file.tokens.json and file.ast_tree.json
1 parent 620933b commit 211c3b8

19 files changed: +444 -379 lines changed

.gitignore (-2)

@@ -1,2 +0,0 @@
-bin
-obj

.vscode/settings.json (+3)

@@ -0,0 +1,3 @@
+{
+    "deno.enable": true
+}

Soup.cs (-33)

This file was deleted.

Soup.csproj (-6)

This file was deleted.

Soup.ts (+19)

@@ -0,0 +1,19 @@
+import { Tokenize } from "./lexer/lexer.ts";
+import Parser from "./parser/parser.ts";
+
+const filecontents = Deno.readTextFileSync(Deno.args[0]);
+const src = filecontents.split("");
+const parser = new Parser();
+const Tokens = Tokenize(src)
+
+for (let index = 0; index < Tokens.length; index++) {
+    const item = Tokens[index];
+    console.log("Value: '"+item.T_Value + "' Token: " + item.T_Type);
+
+}
+
+const program = parser.produceAST(src)
+console.log(program)
+
+Deno.writeTextFileSync(Deno.args[0].replaceAll(".sp", ".tokens.json"), JSON.stringify(Tokens, null, 3));
+Deno.writeTextFileSync(Deno.args[0].replaceAll(".sp", ".ast_tree.json"), JSON.stringify(program, null, 3));
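Usage note: the commit does not show how Soup.ts is meant to be invoked, so the command below is only a sketch. The permission flags and the example.sp file name are assumptions; the output file names follow from the replaceAll calls above.

# Hypothetical invocation (example.sp is an assumed input file):
deno run --allow-read --allow-write Soup.ts example.sp
# Per the code above, this would print each token and the AST to the console,
# then write example.tokens.json and example.ast_tree.json next to the source
# file, each pretty-printed with a 3-space indent.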

deno.jsonc (+5)

@@ -0,0 +1,5 @@
+{
+    "tasks": {
+        "dev": "deno run --watch main.ts"
+    }
+}
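For context, tasks declared in deno.jsonc are run with the deno task subcommand, as sketched below. Note that main.ts is not among the files shown in this commit (the entry point added here is Soup.ts), so the "dev" task presumably targets a file added elsewhere.

# Runs the "dev" task defined above; --watch restarts the script on file changes.
deno task dev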

lexer/lexer.cs (-88)

This file was deleted.

lexer/lexer.ts (+81)

@@ -0,0 +1,81 @@
+import { TokenTypes, Token, BuildToken, IsKeyword, IsAlphabetical, IsNumerical, IsOneCharToken, IsSkippable } from "./tokens.ts";
+
+export function Tokenize(chars: string[]){
+
+    let EOF = 0;
+    let tokens = new Array<Token>;
+    let ip = 0;
+    while (ip <= chars.length-1){
+
+        if (EOF){
+            break;
+        }
+
+        if (IsOneCharToken(chars[ip])){
+
+            if (chars[ip] == "("){tokens=BuildToken(tokens, chars[ip], TokenTypes.LParen);if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}}
+            if (chars[ip] == ")"){tokens=BuildToken(tokens, chars[ip], TokenTypes.RParen);if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}}
+            if (chars[ip] == "{"){tokens=BuildToken(tokens, chars[ip], TokenTypes.LBrace);if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}}
+            if (chars[ip] == "}"){tokens=BuildToken(tokens, chars[ip], TokenTypes.RBrace);if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}}
+            if (chars[ip] == "<"){tokens=BuildToken(tokens, chars[ip], TokenTypes.LArrow);if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}}
+            if (chars[ip] == ">"){tokens=BuildToken(tokens, chars[ip], TokenTypes.RArrow);if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}}
+            if (chars[ip] == "["){tokens=BuildToken(tokens, chars[ip], TokenTypes.LBracket);if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}}
+            if (chars[ip] == "]"){tokens=BuildToken(tokens, chars[ip], TokenTypes.RBracket);if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}}
+            if (chars[ip] == "="){tokens=BuildToken(tokens, chars[ip], TokenTypes.Equals);if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}}
+            if (chars[ip] == "+"){tokens=BuildToken(tokens, chars[ip], TokenTypes.BinaryExpression);if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}}
+            if (chars[ip] == "-"){tokens=BuildToken(tokens, chars[ip], TokenTypes.BinaryExpression);if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}}
+            if (chars[ip] == "*"){tokens=BuildToken(tokens, chars[ip], TokenTypes.BinaryExpression);if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}}
+            if (chars[ip] == "%"){tokens=BuildToken(tokens, chars[ip], TokenTypes.BinaryExpression);if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}}
+            if (chars[ip] == "/"){tokens=BuildToken(tokens, chars[ip], TokenTypes.BinaryExpression);if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}}
+
+        }else{
+
+            if (IsNumerical(chars[ip])){
+
+                let Number = "";
+                while (IsNumerical(chars[ip])){
+
+                    Number += chars[ip];
+                    if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}
+
+                }
+                tokens = BuildToken(tokens, Number, TokenTypes.Numeral);
+
+            }
+
+            if (IsAlphabetical(chars[ip])){
+
+                let String = "";
+                while (IsAlphabetical(chars[ip])){
+                    String += chars[ip];
+                    if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}
+                }
+                const tf = IsKeyword(String)[0];
+                const k = IsKeyword(String)[1];
+                if (tf){
+                    tokens = BuildToken(tokens, String, k);
+                }else{
+                    tokens = BuildToken(tokens, String, TokenTypes.Identifier);
+                }
+
+            }
+
+
+            if (IsSkippable(chars[ip])){
+                if (ip+1>(chars.length-1)){EOF=1; break;}else{ip++;}
+            }
+
+            if (!IsAlphabetical(chars[ip]) == !IsNumerical(chars[ip]) == !IsOneCharToken(chars[ip]) == !IsSkippable(chars[ip])){
+                console.log("Char That Cannot Be Handeled found in src -> { "+chars[ip]+" }");
+                Deno.exit(2);
+            }
+
+        }
+
+    }
+
+    tokens = BuildToken(tokens, "EOF", TokenTypes.EOF)
+
+    return tokens;
+
+}
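The lexer above leans on helpers imported from ./tokens.ts, whose TypeScript version is not included in this excerpt. The sketch below is an assumed minimal shape for that module, inferred from the call sites in lexer.ts and the T_Value / T_Type fields read in Soup.ts; the keyword table and any enum members beyond those referenced here are hypothetical.

// Assumed minimal shape of ./tokens.ts, reconstructed from its usage in
// lexer.ts and Soup.ts; not the repository's actual implementation.

export enum TokenTypes {
  LParen, RParen, LBrace, RBrace, LArrow, RArrow, LBracket, RBracket,
  Equals, BinaryExpression, Numeral, Identifier, EOF,
  // ...keyword-specific token types returned by IsKeyword would also live here
}

export interface Token {
  T_Value: string;    // raw source text of the token (field name used in Soup.ts)
  T_Type: TokenTypes; // token category
}

// Appends a token to the list and returns the array, matching how lexer.ts
// reassigns `tokens = BuildToken(...)`.
export function BuildToken(tokens: Token[], value: string, type: TokenTypes): Token[] {
  tokens.push({ T_Value: value, T_Type: type });
  return tokens;
}

// Hypothetical keyword table; the real keywords are defined elsewhere in the repo.
const Keywords: Record<string, TokenTypes> = {};

// Returns an [isKeyword, tokenType] pair, matching IsKeyword(String)[0] / [1] in lexer.ts.
export function IsKeyword(word: string): [boolean, TokenTypes] {
  return word in Keywords ? [true, Keywords[word]] : [false, TokenTypes.Identifier];
}

export function IsAlphabetical(c: string): boolean { return /^[A-Za-z]$/.test(c); }
export function IsNumerical(c: string): boolean { return /^[0-9]$/.test(c); }
export function IsOneCharToken(c: string): boolean { return "(){}<>[]=+-*%/".includes(c); }
export function IsSkippable(c: string): boolean { return c === " " || c === "\t" || c === "\n" || c === "\r"; }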

lexer/tokens.cs (-144)

This file was deleted.

0 commit comments
