Skip to content
This repository has been archived by the owner on Apr 20, 2023. It is now read-only.

Commit

Permalink
Merge pull request #10 from scholtzm/remove-process-chdir-from-lang-parser
Browse files Browse the repository at this point in the history

Remove usage of process.chdir from language parser
  • Loading branch information
DoctorMcKay authored Oct 16, 2017
2 parents e0268d4 + 95391aa commit 5d5798d
Show file tree
Hide file tree
Showing 2 changed files with 45 additions and 49 deletions.
9 changes: 2 additions & 7 deletions steam-resources/steam_language_parser/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,17 +2,12 @@ var fs = require('fs');
var token_analyzer = require('./parser/token_analyzer');

var codeGen = require('./generator/node_gen');
var languagePath = require('path').join(__dirname, '../steam_language');
var filePath = require('path').join(__dirname, '../steam_language/steammsg.steamd');

var cwd = process.cwd();
process.chdir(languagePath);

var tokenList = require('./parser/language_parser').tokenizeString(fs.readFileSync('steammsg.steamd', { encoding: 'ascii' }));
var tokenList = require('./parser/language_parser').tokenizeString(fs.readFileSync(filePath, { encoding: 'ascii' }));

var root = token_analyzer.analyze(tokenList);

process.chdir(cwd);

var rootEnumNode = new token_analyzer.Node();
var rootMessageNode = new token_analyzer.Node();

Expand Down
85 changes: 43 additions & 42 deletions steam-resources/steam_language_parser/parser/token_analyzer.js
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
var path = require('path');
var language_parser = require('./language_parser');
var lookupSymbol = require('./symbol_locator').lookupSymbol;

Expand All @@ -20,21 +21,21 @@ exports.EnumNode = function() {

exports.analyze = function(tokens) {
var root = new exports.Node();

while (tokens.length > 0) {
var cur = tokens.shift();

switch (cur.name) {
case 'EOF':
break;
case 'preprocess':
var text = expect(tokens, 'string');

if (cur.value == 'import') {
var parentTokens = language_parser.tokenizeString(require('fs').readFileSync(text.value, { encoding: 'ascii' }));
var parentTokens = language_parser.tokenizeString(require('fs').readFileSync(path.join(__dirname, '../../steam_language', text.value), { encoding: 'ascii' }));

var newRoot = exports.analyze(parentTokens);

newRoot.childNodes.forEach(function(child) {
root.childNodes.push(child);
});
Expand All @@ -47,29 +48,29 @@ exports.analyze = function(tokens) {
{
name = expect(tokens, 'identifier');
var ident = null, parent = null;

op1 = optional(tokens, 'operator', '<');
if (op1) {
ident = expect(tokens, 'identifier');
op2 = expect(tokens, 'operator', '>');
}

var expects = optional(tokens, 'identifier', 'expects');
if (expects) {
parent = expect(tokens, 'identifier');
}

var cnode = new exports.ClassNode();
cnode.name = name.value;

if (ident) {
cnode.ident = lookupSymbol(root, ident.value, false);
}

if (parent) {
//cnode.parent = lookupSymbol(root, parent.value, true);
}

root.childNodes.push(cnode);
parseInnerScope(tokens, cnode, root);
}
Expand All @@ -78,27 +79,27 @@ exports.analyze = function(tokens) {
{
name = expect(tokens, 'identifier');
var datatype = null;

op1 = optional(tokens, 'operator', '<');
if (op1) {
datatype = expect(tokens, 'identifier');
op2 = expect(tokens, 'operator', '>');
}

var flag = optional(tokens, 'identifier', 'flags');

var enode = new exports.EnumNode();
enode.name = name.value;

if (flag) {
enode.flags = flag.value;
}

if (datatype) {
enode.type = lookupSymbol(root, datatype.value, false);
}


root.childNodes.push(enode);
parseInnerScope(tokens, enode, root);
}
Expand All @@ -107,32 +108,32 @@ exports.analyze = function(tokens) {
break;
}
}

return root;
};

function parseInnerScope(tokens, parent, root) {
var scope1 = expect(tokens, 'operator', '{');
var scope2 = optional(tokens, 'operator', '}');

while (!scope2) {
var pnode = new exports.PropNode();

var t1 = tokens.shift();

var t1op1 = optional(tokens, 'operator', '<');
var flagop = null;

if (t1op1) {
flagop = expect(tokens, 'identifier');
var t1op2 = expect(tokens, 'operator', '>');

pnode.flagsOpt = flagop.value;
}

var t2 = optional(tokens, 'identifier');
var t3 = optional(tokens, 'identifier');

if (t3) {
pnode.name = t3.value;
pnode.type = lookupSymbol(root, t2.value, false);
Expand All @@ -143,64 +144,64 @@ function parseInnerScope(tokens, parent, root) {
} else {
pnode.name = t1.value;
}

var defop = optional(tokens, 'operator', '=');

if (defop) {
while (true) {
var value = tokens.shift();
pnode.default.push(lookupSymbol(root, value.value, false));

if (optional(tokens, 'operator', '|'))
continue;

expect(tokens, 'terminator', ';');
break;
}
} else {
expect(tokens, 'terminator', ';');
}

var obsolete = optional(tokens, 'identifier', 'obsolete');
if (obsolete) {
pnode.obsolete = '';

var obsoleteReason = optional(tokens, 'string');

if (obsoleteReason)
pnode.obsolete = obsoleteReason.value;
}

parent.childNodes.push(pnode);

scope2 = optional(tokens, 'operator', '}');
}
}

/**
 * Consumes and returns the next token from the stream, requiring that it
 * matches the given token name (and, when supplied, the given value).
 *
 * @param {Array} tokens - Token stream; the matched token is shift()ed off.
 * @param {string} name - Required token name (e.g. 'identifier', 'operator').
 * @param {string} [value] - Optional required token value (e.g. '>').
 * @returns {Object} The consumed token, or a synthetic EOF token when the
 *   stream is exhausted.
 * @throws {Error} When the next token does not match.
 */
function expect(tokens, name, value) {
	var peek = tokens[0];

	if (!peek) {
		// Fix: construct the Token with `new`, consistent with optional() below.
		// Calling the constructor bare would not produce a Token instance.
		return new language_parser.Token('EOF', '');
	}

	if (peek.name != name || value && peek.value != value) {
		throw new Error("Expecting " + name);
	}

	return tokens.shift();
}

/**
 * Non-throwing counterpart of expect(): consumes and returns the next token
 * only if it matches the given name (and value, when one is supplied).
 *
 * @param {Array} tokens - Token stream; the matched token is shift()ed off.
 * @param {string} name - Token name to match against.
 * @param {string} [value] - Optional token value to match against.
 * @returns {Object|null} The consumed token on a match, null on a mismatch
 *   (nothing is consumed), or a synthetic EOF token when the stream is empty.
 */
function optional(tokens, name, value) {
	var head = tokens[0];

	if (!head) {
		return new language_parser.Token('EOF', '');
	}

	var nameMatches = head.name == name;
	var valueMatches = !value || head.value == value;

	return (nameMatches && valueMatches) ? tokens.shift() : null;
}

0 comments on commit 5d5798d

Please sign in to comment.