Skip to content

Commit

Permalink
fix highlighting error for identifiers with special meaning
Browse files Browse the repository at this point in the history
- import & module
- close SonarOpenCommunity#2197
- close SonarOpenCommunity#2192
  • Loading branch information
guwirth committed Jun 18, 2021
1 parent ac8f4d0 commit 6bf0302
Show file tree
Hide file tree
Showing 3 changed files with 78 additions and 67 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@
* **A.12 Preprocessing directives [gram.cpp]**
*
* preprocessing-file:
* groupopt
* groupopt
* module-file
*
* module-file:
Expand Down Expand Up @@ -187,7 +187,7 @@ public void init() {
globalMacros = new MapChain<>();
globalMacros.putAll(unitMacros);

if(LOG.isDebugEnabled()) {
if (LOG.isDebugEnabled()) {
LOG.debug("global include directories: {}", unitCodeProvider.getIncludeRoots());
LOG.debug("global macros: {}", globalMacros);
}
Expand All @@ -197,7 +197,7 @@ public void init() {

// add unit specific stuff
boolean changes = addUnitIncludeDirectories(path);
if (changes && LOG.isDebugEnabled() ) {
if (changes && LOG.isDebugEnabled()) {
LOG.debug("unit include directories: {}", unitCodeProvider.getIncludeRoots());
}
changes = addUnitMacros(path);
Expand Down Expand Up @@ -282,7 +282,7 @@ private PreprocessorAction handlePreprocessorDirective(Token token, String rootF
// ignore all other preprocessor directives (which are not handled explicitly) and strip them from the stream
return oneConsumedToken(token);
}
}
}

public static void finalReport() {
if (missingIncludeFilesCounter != 0) {
Expand Down Expand Up @@ -541,13 +541,13 @@ private static Macro parseMacroDefinition(AstNode defineLineAst) {
var identifier = vaargs.getFirstChild(IDENTIFIER);
macroParams.add(identifier == null
? Token.builder()
.setLine(vaargs.getToken().getLine())
.setColumn(vaargs.getToken().getColumn())
.setURI(vaargs.getToken().getURI())
.setValueAndOriginalValue("__VA_ARGS__")
.setType(IDENTIFIER)
.setGeneratedCode(true)
.build()
.setLine(vaargs.getToken().getLine())
.setColumn(vaargs.getToken().getColumn())
.setURI(vaargs.getToken().getURI())
.setValueAndOriginalValue("__VA_ARGS__")
.setType(IDENTIFIER)
.setGeneratedCode(true)
.build()
: identifier.getToken());
}

Expand Down Expand Up @@ -658,15 +658,16 @@ private boolean addUnitMacros(String level) {
}

/**
 * Reads the global (project-wide) include directories from the squid configuration
 * and installs them as the code provider's include roots.
 *
 * <p>Side effect: caches the list in {@code globalIncludeDirectories} so that
 * unit-level lookups can append to it later (see {@code addUnitIncludeDirectories}).
 */
private void addGlobalIncludeDirectories() {
  globalIncludeDirectories = squidConfig.getValues(CxxSquidConfiguration.GLOBAL,
                                                   CxxSquidConfiguration.INCLUDE_DIRECTORIES);
  unitCodeProvider.setIncludeRoots(globalIncludeDirectories, squidConfig.getBaseDir());
}

/**
 * Installs the include directories for a single translation unit: the unit-specific
 * directories first, followed by the cached global ones.
 *
 * @param level configuration level key identifying the translation unit
 * @return {@code true} if the unit contributed any include directories of its own
 */
private boolean addUnitIncludeDirectories(String level) {
  List<String> unitIncludeDirectories = squidConfig.getLevelValues(level, CxxSquidConfiguration.INCLUDE_DIRECTORIES);
  boolean hasUnitIncludes = !unitIncludeDirectories.isEmpty();
  // unit directories take precedence: they are searched before the global ones
  unitIncludeDirectories.addAll(globalIncludeDirectories);
  unitCodeProvider.setIncludeRoots(unitIncludeDirectories, squidConfig.getBaseDir());
  return hasUnitIncludes;
}

Expand Down Expand Up @@ -799,7 +800,7 @@ private static void expandVaOpt(List<Token> tokens, boolean keep) {
if (firstIndex > 0 && lastIndex < tokens.size()) {
if (keep) {
// keep pp-tokensopt, remove ) and __VA_OPT__ (
tokens.subList(lastIndex, lastIndex+1).clear();
tokens.subList(lastIndex, lastIndex + 1).clear();
tokens.subList(0, firstIndex).clear();
} else {
// remove from body: __VA_OPT__ ( pp-tokensopt )
Expand All @@ -819,8 +820,8 @@ private List<Token> replaceParams(List<Token> body, List<Token> parameters, List

// container to search parameter by name
var paramterIndex = new HashMap<String, Integer>();
for(var index=0; index<parameters.size(); index++) {
paramterIndex.put(parameters.get(index).getValue(),index);
for (var index = 0; index < parameters.size(); index++) {
paramterIndex.put(parameters.get(index).getValue(), index);
}

for (var i = 0; i < body.size(); ++i) {
Expand Down Expand Up @@ -1017,18 +1018,18 @@ private File findIncludedFile(AstNode ast, Token token, String currFileName) {
return null;
}

void handleConstantExpression(AstNode ast,Token token, String filename){
void handleConstantExpression(AstNode ast, Token token, String filename) {
try {
unitCodeProvider.skipBlock(false);
boolean result = ExpressionEvaluator.eval(this, ast.getFirstDescendant(CppGrammarImpl.constantExpression));
unitCodeProvider.expressionWas(result);
unitCodeProvider.skipBlock(!result);
} catch (EvaluationException e) {
LOG.error("[{}:{}]: error evaluating the expression {} assume 'true' ...",
filename, token.getLine(), token.getValue());
unitCodeProvider.expressionWas(true);
unitCodeProvider.skipBlock(false);
}
} catch (EvaluationException e) {
LOG.error("[{}:{}]: error evaluating the expression {} assume 'true' ...",
filename, token.getLine(), token.getValue());
unitCodeProvider.expressionWas(true);
unitCodeProvider.skipBlock(false);
}
}

PreprocessorAction handleIfLine(AstNode ast, Token token, String filename) {
Expand Down Expand Up @@ -1105,7 +1106,8 @@ PreprocessorAction handleIncludeLine(AstNode ast, Token token, String filename,
File includedFile = findIncludedFile(ast, token, filename);
if (includedFile == null) {
missingIncludeFilesCounter++;
LOG.debug("[" + filename + ":" + token.getLine() + "]: preprocessor cannot find include file '" + token.getValue() + "'");
LOG.debug("[" + filename + ":" + token.getLine()
+ "]: preprocessor cannot find include file '" + token.getValue() + "'");
} else if (analysedFiles.add(includedFile.getAbsoluteFile())) {
unitCodeProvider.pushFileState(includedFile);
try {
Expand All @@ -1122,63 +1124,45 @@ PreprocessorAction handleIncludeLine(AstNode ast, Token token, String filename,
}

PreprocessorAction handleImportLine(AstNode ast, Token token, String filename, Charset charset) {
if (ast.getFirstDescendant(CppGrammarImpl.expandedIncludeBody) != null) {
if (ast.getFirstDescendant(CppGrammarImpl.expandedIncludeBody) != null) {
// import <file>
return handleIncludeLine(ast, token, filename, charset);
}

// forward to parser: ... import ...
return mapModuleTokens(ast, token);
return mapFromCppToCxx(ast, token);
}

/**
 * Handles a C++20 {@code module} directive: the preprocessor does not interpret it,
 * the tokens are converted to CXX tokens and forwarded to the parser.
 *
 * @param ast   AST of the module line
 * @param token first token of the directive
 * @return preprocessor action replacing the directive with CXX tokens
 */
PreprocessorAction handleModuleLine(AstNode ast, Token token) {
  // forward to parser: ... module ...
  return mapFromCppToCxx(ast, token);
}

PreprocessorAction mapModuleTokens(AstNode ast, Token token) {
PreprocessorAction mapFromCppToCxx(AstNode ast, Token token) {
List<Token> replTokens = new ArrayList<>();
for (Token ppToken : stripEOF(serialize(ast))) {
String value = ppToken.getValue();
var type = ppToken.getType();
var newToken = ppToken;
var convert = true;

// identifier with special meaning?
// if (type.equals(IDENTIFIER)) {
// if (value.equals(CppSpecialIdentifier.MODULE.getValue())) {
// type = CppSpecialIdentifier.MODULE;
// convert = false;
// } else if (value.equals(CppSpecialIdentifier.IMPORT.getValue())) {
// type = CppSpecialIdentifier.IMPORT;
// convert = false;
// } else if (value.equals(CppSpecialIdentifier.EXPORT.getValue())) {
// type = CppSpecialIdentifier.EXPORT;
// convert = false;
// }
// }

// convert pp token to cxx token
if (convert) {
if (!value.isBlank()) {
// call CXX lexer to create a CXX token
List<Token> cxxTokens = CxxLexer.create().lex(value);
newToken = cxxTokens.get(0);
type = newToken.getType();
}

if (!type.equals(EOF)) {
newToken = Token.builder()
.setLine(token.getLine())
.setColumn(ppToken.getColumn())
.setURI(ppToken.getURI())
.setValueAndOriginalValue(ppToken.getValue())
.setType(type)
.build();

replTokens.add(newToken);
var cxxToken = cxxTokens.get(0);
var cxxType = cxxToken.getType();

if (!cxxType.equals(EOF)) {
cxxToken = Token.builder()
.setLine(token.getLine() + ppToken.getLine() - 1)
.setColumn(token.getColumn() + ppToken.getColumn())
.setURI(ppToken.getURI())
.setValueAndOriginalValue(ppToken.getValue())
.setType(cxxType)
.build();

replTokens.add(cxxToken);
}
}
}

return new PreprocessorAction(1, Collections.singletonList(Trivia.createSkippedText(token)),replTokens);
return new PreprocessorAction(1, Collections.singletonList(Trivia.createPreprocessingToken(token)), replTokens);
}

PreprocessorAction handleUndefLine(AstNode ast, Token token) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -187,6 +187,16 @@ public void preprocessDirective() {
checkOnRange(20, 0, 7, TypeOfText.PREPROCESS_DIRECTIVE); // #define
}

@Test
@SuppressWarnings("squid:S2699") // ... checkOnRange contains the assertion
public void identifiersWithSpecialMeaning() {
// identifier with special meaning => no highlighting
// (line, startColumn, length, expected highlighting type) refer to the C++ fixture file;
// 'import'/'module' used as ordinary identifiers must not be highlighted as keywords
checkOnRange(112, 11, 6, null); // import
checkOnRange(119, 10, 6, null); // module
checkOnRange(120, 12, 6, null); // module
checkOnRange(122, 13, 6, null); // module
}

/**
* Checks the highlighting of a range of columns. The first column of a line has index 0. The range is the columns of
* the token.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,11 +81,11 @@ void test2(const char* sourceFilename)
void test3()
{
const char *t1 = "..."; // UTF-8 encoded
const char *t2 = u8"..."; // UTF-8 encoded
const char *t2 = u8"..."; // UTF-8 encoded
const wchar_t *t3 = L"..."; // Wide string
const char16_t *t4 = u"..."; // UTF-16 encoded
const char32_t *t5 = U"..."; // UTF-32 encoded

const char *t6 = "hello" " world";
const wchar_t *t7 = u"" "hello world";
const wchar_t *t8 = /*comment1*/ u"" /*comment2*/ "hello world" /*comment3*/; // issue #996
Expand All @@ -102,7 +102,24 @@ void test4()
{
auto txt = R"(
Hello World!
)";
)";
}

// issue #2197: 'import' used as an ordinary member name must not be highlighted
namespace Test {
class Test {
private:
bool import = false;
}; // fixed: class definition requires a trailing ';'
} // namespace Test

// issue #2192
// issue #2192: 'module' used as an ordinary identifier must not be highlighted
void test5()
{
assert(module != nullptr);
for (int module=0; module<10; module++) {}
char modules[]= {}; // fixed: missing ';' after declaration
for (auto module : modules) {}
}

/* EOF */

0 comments on commit 6bf0302

Please sign in to comment.