diff --git a/src/main/java/com/github/_1c_syntax/bsl/sonar/BSLCoreSensor.java b/src/main/java/com/github/_1c_syntax/bsl/sonar/BSLCoreSensor.java
index 95d84533..64dc0abf 100644
--- a/src/main/java/com/github/_1c_syntax/bsl/sonar/BSLCoreSensor.java
+++ b/src/main/java/com/github/_1c_syntax/bsl/sonar/BSLCoreSensor.java
@@ -30,7 +30,6 @@
 import com.github._1c_syntax.bsl.languageserver.context.ServerContext;
 import com.github._1c_syntax.bsl.languageserver.diagnostics.metadata.DiagnosticInfo;
 import com.github._1c_syntax.bsl.languageserver.diagnostics.metadata.DiagnosticParameterInfo;
-import com.github._1c_syntax.bsl.parser.BSLLexer;
 import com.github._1c_syntax.bsl.sonar.language.BSLLanguage;
 import com.github._1c_syntax.bsl.sonar.language.BSLLanguageServerRuleDefinition;
 import com.github._1c_syntax.utils.Absolute;
@@ -50,8 +49,6 @@
 import org.sonar.api.batch.sensor.SensorDescriptor;
 import org.sonar.api.batch.sensor.coverage.NewCoverage;
 import org.sonar.api.batch.sensor.cpd.NewCpdTokens;
-import org.sonar.api.batch.sensor.highlighting.NewHighlighting;
-import org.sonar.api.batch.sensor.highlighting.TypeOfText;
 import org.sonar.api.measures.CoreMetrics;
 import org.sonar.api.measures.FileLinesContext;
 import org.sonar.api.measures.FileLinesContextFactory;
@@ -59,7 +56,6 @@
 import org.sonar.api.utils.log.Logger;
 import org.sonar.api.utils.log.Loggers;
 
-import javax.annotation.Nullable;
 import java.io.File;
 import java.io.IOException;
 import java.net.URI;
@@ -84,6 +80,7 @@ public class BSLCoreSensor implements Sensor {
   private final boolean langServerEnabled;
   private final List<String> sourcesList;
   private final IssuesLoader issuesLoader;
+  private final BSLHighlighter highlighter;
   private final boolean calculateCoverLoc;
 
@@ -105,6 +102,7 @@ public BSLCoreSensor(SensorContext context, FileLinesContextFactory fileLinesCon
       .orElse(Collections.singletonList("."));
 
     issuesLoader = new IssuesLoader(context);
+    highlighter = new BSLHighlighter(context);
   }
 
   @Override
@@ -195,7 +193,7 @@ private void processFile(InputFile inputFile, ServerContext bslServerContext) {
     }
 
     saveCpd(inputFile, documentContext);
-    saveHighlighting(inputFile, documentContext);
+    highlighter.saveHighlighting(inputFile, documentContext);
     saveMeasures(inputFile, documentContext);
     saveCoverageLoc(inputFile, documentContext);
 
@@ -230,34 +228,6 @@ private void saveCpd(InputFile inputFile, DocumentContext documentContext) {
 
   }
 
-  private void saveHighlighting(InputFile inputFile, DocumentContext documentContext) {
-
-    NewHighlighting highlighting = context.newHighlighting().onFile(inputFile);
-
-    documentContext.getTokens().forEach((Token token) -> {
-      TypeOfText typeOfText = getTypeOfText(token.getType());
-
-      if (typeOfText == null) {
-        return;
-      }
-
-      int line = token.getLine();
-      int charPositionInLine = token.getCharPositionInLine();
-      String tokenText = token.getText();
-
-      highlighting.highlight(
-        line,
-        charPositionInLine,
-        line,
-        charPositionInLine + tokenText.length(),
-        typeOfText
-      );
-    });
-
-    highlighting.save();
-
-  }
-
   private void saveMeasures(InputFile inputFile, DocumentContext documentContext) {
 
     MetricStorage metrics = documentContext.getMetrics();
@@ -401,100 +371,6 @@ private LanguageServerConfiguration getLanguageServerConfiguration() {
     return configuration;
   }
 
-  @Nullable
-  private static TypeOfText getTypeOfText(int tokenType) {
-
-    TypeOfText typeOfText = null;
-
-    switch (tokenType) {
-      case BSLLexer.PROCEDURE_KEYWORD:
-      case BSLLexer.FUNCTION_KEYWORD:
-      case BSLLexer.ENDPROCEDURE_KEYWORD:
-      case BSLLexer.ENDFUNCTION_KEYWORD:
-      case BSLLexer.EXPORT_KEYWORD:
-      case BSLLexer.VAL_KEYWORD:
-      case BSLLexer.ENDIF_KEYWORD:
-      case BSLLexer.ENDDO_KEYWORD:
-      case BSLLexer.IF_KEYWORD:
-      case BSLLexer.ELSIF_KEYWORD:
-      case BSLLexer.ELSE_KEYWORD:
-      case BSLLexer.THEN_KEYWORD:
-      case BSLLexer.WHILE_KEYWORD:
-      case BSLLexer.DO_KEYWORD:
-      case BSLLexer.FOR_KEYWORD:
-      case BSLLexer.TO_KEYWORD:
-      case BSLLexer.EACH_KEYWORD:
-      case BSLLexer.IN_KEYWORD:
-      case BSLLexer.TRY_KEYWORD:
-      case BSLLexer.EXCEPT_KEYWORD:
-      case BSLLexer.ENDTRY_KEYWORD:
-      case BSLLexer.RETURN_KEYWORD:
-      case BSLLexer.CONTINUE_KEYWORD:
-      case BSLLexer.RAISE_KEYWORD:
-      case BSLLexer.VAR_KEYWORD:
-      case BSLLexer.NOT_KEYWORD:
-      case BSLLexer.OR_KEYWORD:
-      case BSLLexer.AND_KEYWORD:
-      case BSLLexer.NEW_KEYWORD:
-      case BSLLexer.GOTO_KEYWORD:
-      case BSLLexer.BREAK_KEYWORD:
-      case BSLLexer.EXECUTE_KEYWORD:
-        typeOfText = TypeOfText.KEYWORD;
-        break;
-      case BSLLexer.TRUE:
-      case BSLLexer.FALSE:
-      case BSLLexer.UNDEFINED:
-      case BSLLexer.NULL:
-      case BSLLexer.DATETIME:
-      case BSLLexer.DECIMAL:
-      case BSLLexer.FLOAT:
-        typeOfText = TypeOfText.CONSTANT;
-        break;
-      case BSLLexer.STRING:
-      case BSLLexer.STRINGSTART:
-      case BSLLexer.STRINGPART:
-      case BSLLexer.STRINGTAIL:
-      case BSLLexer.PREPROC_STRING:
-        typeOfText = TypeOfText.STRING;
-        break;
-      case BSLLexer.LINE_COMMENT:
-        typeOfText = TypeOfText.COMMENT;
-        break;
-      case BSLLexer.HASH:
-      case BSLLexer.PREPROC_USE_KEYWORD:
-      case BSLLexer.PREPROC_REGION:
-      case BSLLexer.PREPROC_END_REGION:
-      case BSLLexer.PREPROC_AND_KEYWORD:
-      case BSLLexer.PREPROC_OR_KEYWORD:
-      case BSLLexer.PREPROC_NOT_KEYWORD:
-      case BSLLexer.PREPROC_IF_KEYWORD:
-      case BSLLexer.PREPROC_THEN_KEYWORD:
-      case BSLLexer.PREPROC_ELSIF_KEYWORD:
-      case BSLLexer.PREPROC_ELSE_KEYWORD:
-      case BSLLexer.PREPROC_ENDIF_KEYWORD:
-        typeOfText = TypeOfText.PREPROCESS_DIRECTIVE;
-        break;
-      case BSLLexer.AMPERSAND:
-      case BSLLexer.ANNOTATION_AFTER_SYMBOL:
-      case BSLLexer.ANNOTATION_AROUND_SYMBOL:
-      case BSLLexer.ANNOTATION_ATCLIENT_SYMBOL:
-      case BSLLexer.ANNOTATION_ATCLIENTATSERVER_SYMBOL:
-      case BSLLexer.ANNOTATION_ATCLIENTATSERVERNOCONTEXT_SYMBOL:
-      case BSLLexer.ANNOTATION_ATSERVER_SYMBOL:
-      case BSLLexer.ANNOTATION_ATSERVERNOCONTEXT_SYMBOL:
-      case BSLLexer.ANNOTATION_BEFORE_SYMBOL:
-      case BSLLexer.ANNOTATION_CHANGEANDVALIDATE_SYMBOL:
-      case BSLLexer.ANNOTATION_CUSTOM_SYMBOL:
-        typeOfText = TypeOfText.ANNOTATION;
-        break;
-      default:
-        // no-op
-    }
-
-    return typeOfText;
-
-  }
-
   private static Object castDiagnosticParameterValue(String valueToCast, Class<?> type) {
     Object value;
     if (type == Integer.class) {
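Editor's note: the sensor now only delegates to the new BSLHighlighter. For readers less familiar with the SonarQube highlighting API used throughout this patch, the sketch below shows how an ANTLR token maps onto the coordinates that NewHighlighting.highlight expects: lines are 1-based, column offsets are 0-based, and the end offset is exclusive. This is an illustrative snippet, not part of the patch; the TokenSpan class name is hypothetical.

  import org.antlr.v4.runtime.Token;

  final class TokenSpan {
    // Hypothetical helper: highlighting coordinates of a single-line token.
    final int line;       // 1-based, as reported by ANTLR
    final int startChar;  // 0-based column of the first character
    final int endChar;    // exclusive end column

    TokenSpan(Token token) {
      this.line = token.getLine();
      this.startChar = token.getCharPositionInLine();
      // The tokens highlighted here do not span lines, so the end column
      // is simply start + text length, exactly as in BSLHighlighter.highlightToken.
      this.endChar = startChar + token.getText().length();
    }
  }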
diff --git a/src/main/java/com/github/_1c_syntax/bsl/sonar/BSLHighlighter.java b/src/main/java/com/github/_1c_syntax/bsl/sonar/BSLHighlighter.java
new file mode 100644
index 00000000..893d1c84
--- /dev/null
+++ b/src/main/java/com/github/_1c_syntax/bsl/sonar/BSLHighlighter.java
@@ -0,0 +1,613 @@
+/*
+ * This file is a part of SonarQube 1C (BSL) Community Plugin.
+ *
+ * Copyright © 2018-2020
+ * Alexey Sosnoviy, Nikita Gryzlov
+ *
+ * SPDX-License-Identifier: LGPL-3.0-or-later
+ *
+ * SonarQube 1C (BSL) Community Plugin is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3.0 of the License, or (at your option) any later version.
+ *
+ * SonarQube 1C (BSL) Community Plugin is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with SonarQube 1C (BSL) Community Plugin.
+ */
+package com.github._1c_syntax.bsl.sonar;
+
+import com.github._1c_syntax.bsl.languageserver.context.DocumentContext;
+import com.github._1c_syntax.bsl.languageserver.utils.Ranges;
+import com.github._1c_syntax.bsl.parser.BSLLexer;
+import com.github._1c_syntax.bsl.parser.SDBLLexer;
+import com.github._1c_syntax.bsl.parser.Tokenizer;
+import lombok.Data;
+import lombok.EqualsAndHashCode;
+import lombok.RequiredArgsConstructor;
+import org.antlr.v4.runtime.Token;
+import org.eclipse.lsp4j.Position;
+import org.eclipse.lsp4j.Range;
+import org.sonar.api.batch.fs.InputFile;
+import org.sonar.api.batch.sensor.SensorContext;
+import org.sonar.api.batch.sensor.highlighting.NewHighlighting;
+import org.sonar.api.batch.sensor.highlighting.TypeOfText;
+
+import javax.annotation.Nullable;
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+@RequiredArgsConstructor
+public class BSLHighlighter {
+
+  private static final Set<Integer> BSL_KEYWORDS = createBslKeywords();
+  private static final Set<Integer> BSL_SEPARATORS = createBslSeparators();
+  private static final Set<Integer> BSL_LITERALS = createBslLiterals();
+  private static final Set<Integer> BSL_STRINGS = createBslStrings();
+  private static final Set<Integer> BSL_COMMENTS = createBslComments();
+  private static final Set<Integer> BSL_PREPROCESSOR = createBslPreprocessor();
+  private static final Set<Integer> BSL_ANNOTATIONS = createBslAnnotations();
+
+  private static final Set<Integer> SDBL_KEYWORDS = createSdblKeywords();
+  private static final Set<Integer> SDBL_FUNCTIONS = createSdblFunctions();
+  private static final Set<Integer> SDBL_METADATA_TYPES = createSdblMetadataTypes();
+  private static final Set<Integer> SDBL_VIRTUAL_TABLES = createSdblVirtualTables();
+  private static final Set<Integer> SDBL_LITERALS = createSdblLiterals();
+  private static final Set<Integer> SDBL_SEPARATORS = createSdblSeparators();
+  private static final Set<Integer> SDBL_STRINGS = createSdblStrings();
+  private static final Set<Integer> SDBL_COMMENTS = createSdblComments();
+  private static final Set<Integer> SDBL_PARAMETERS = createSdblParameters();
+
+  private final SensorContext context;
+
+  public void saveHighlighting(InputFile inputFile, DocumentContext documentContext) {
+    Set<HighlightingData> highlightingData = new HashSet<>(documentContext.getTokens().size());
+
+    // populate bsl highlight data
+    documentContext.getTokens().forEach(token ->
+      highlightToken(token, highlightingData, getTypeOfTextBSL(token.getType()))
+    );
+
+    // compute and populate sdbl highlight data
+    Map<Integer, List<Token>> queryTokens = documentContext.getQueries().stream()
+      .map(Tokenizer::getTokens)
+      .flatMap(Collection::stream)
+      .collect(Collectors.groupingBy(Token::getLine));
+    Map<Integer, Set<HighlightingData>> highlightingDataSDBL = new HashMap<>(queryTokens.size());
+
+    queryTokens.values().stream()
+      .flatMap(Collection::stream)
+      .forEach(token -> highlightToken(
+        token,
+        highlightingDataSDBL.computeIfAbsent(token.getLine(), BSLHighlighter::newHashSet),
+        getTypeOfTextSDBL(token.getType()))
+      );
+
+    // find bsl strings to check overlap with sdbl tokens
+    Set<HighlightingData> strings = highlightingData.stream()
+      .filter(data -> data.getType() == TypeOfText.STRING)
+      .collect(Collectors.toSet());
+
+    strings.forEach((HighlightingData string) -> {
+      Range stringRange = string.getRange();
+
+      // find overlapping tokens
+      Set<HighlightingData> dataOfCurrentLine = highlightingDataSDBL.get(stringRange.getStart().getLine());
+      if (Objects.isNull(dataOfCurrentLine)) {
+        return;
+      }
+
+      List<HighlightingData> currentTokens = dataOfCurrentLine.stream()
+        .filter(sdblData -> Ranges.containsRange(stringRange, sdblData.getRange()))
+        .sorted(Comparator.comparing(data -> data.getRange().getStart().getCharacter()))
+        .collect(Collectors.toList());
+
+      if (currentTokens.isEmpty()) {
+        return;
+      }
+
+      // disable current bsl token
+      string.setActive(false);
+
+      // split current bsl token to parts excluding sdbl tokens
+      Position start = stringRange.getStart();
+      int line = start.getLine();
+      int startChar;
+      int endChar = start.getCharacter();
+      for (HighlightingData currentToken : currentTokens) {
+        startChar = endChar;
+        endChar = currentToken.getRange().getStart().getCharacter();
+        TypeOfText typeOfText = string.getType();
+
+        if (startChar < endChar) {
+          // add string part
+          highlightingData.add(new HighlightingData(
+            line,
+            startChar,
+            endChar,
+            typeOfText
+          ));
+        }
+
+        endChar = currentToken.getRange().getEnd().getCharacter();
+      }
+
+      // add final string part
+      startChar = endChar;
+      endChar = string.getRange().getEnd().getCharacter();
+      TypeOfText typeOfText = string.getType();
+
+      if (startChar < endChar) {
+        highlightingData.add(new HighlightingData(
+          line,
+          startChar,
+          endChar,
+          typeOfText
+        ));
+      }
+    });
+
+    // merge collected bsl tokens with sdbl tokens
+    highlightingDataSDBL.values().forEach(highlightingData::addAll);
+
+    // save only active tokens
+    NewHighlighting highlighting = context.newHighlighting().onFile(inputFile);
+
+    highlightingData.stream()
+      .filter(HighlightingData::isActive)
+      .forEach(data ->
+        highlighting.highlight(
+          data.getRange().getStart().getLine(),
+          data.getRange().getStart().getCharacter(),
+          data.getRange().getEnd().getLine(),
+          data.getRange().getEnd().getCharacter(),
+          data.getType()
+        )
+      );
+
+    highlighting.save();
+  }
+
+  public void highlightToken(
+    Token token,
+    Collection<HighlightingData> highlightingData,
+    @Nullable TypeOfText typeOfText
+  ) {
+    if (typeOfText == null) {
+      return;
+    }
+
+    int line = token.getLine();
+    int charPositionInLine = token.getCharPositionInLine();
+    String tokenText = token.getText();
+
+    Range range = Ranges.create(
+      line,
+      charPositionInLine,
+      line,
+      charPositionInLine + tokenText.length()
+    );
+
+    HighlightingData data = new HighlightingData(
+      range,
+      typeOfText
+    );
+
+    highlightingData.add(data);
+  }
+
+  @Nullable
+  private static TypeOfText getTypeOfTextBSL(int tokenType) {
+    TypeOfText typeOfText;
+
+    if (BSL_KEYWORDS.contains(tokenType)) {
+      typeOfText = TypeOfText.KEYWORD;
+    } else if (BSL_SEPARATORS.contains(tokenType)) {
+      typeOfText = TypeOfText.KEYWORD_LIGHT;
+    } else if (BSL_LITERALS.contains(tokenType)) {
+      typeOfText = TypeOfText.CONSTANT;
+    } else if (BSL_STRINGS.contains(tokenType)) {
+      typeOfText = TypeOfText.STRING;
+    } else if (BSL_COMMENTS.contains(tokenType)) {
+      typeOfText = TypeOfText.COMMENT;
+    } else if (BSL_PREPROCESSOR.contains(tokenType)) {
+      typeOfText = TypeOfText.PREPROCESS_DIRECTIVE;
+    } else if (BSL_ANNOTATIONS.contains(tokenType)) {
+      typeOfText = TypeOfText.ANNOTATION;
+    } else {
+      typeOfText = null;
+    }
+
+    return typeOfText;
+  }
+
+  @Nullable
+  private static TypeOfText getTypeOfTextSDBL(int tokenType) {
+    TypeOfText typeOfText;
+
+    if (SDBL_KEYWORDS.contains(tokenType)) {
+      typeOfText = TypeOfText.KEYWORD;
+    } else if (SDBL_FUNCTIONS.contains(tokenType)) {
+      typeOfText = TypeOfText.KEYWORD_LIGHT;
+    } else if (SDBL_METADATA_TYPES.contains(tokenType)) {
+      typeOfText = TypeOfText.KEYWORD_LIGHT;
+    } else if (SDBL_VIRTUAL_TABLES.contains(tokenType)) {
+      typeOfText = TypeOfText.KEYWORD_LIGHT;
+    } else if (SDBL_LITERALS.contains(tokenType)) {
+      typeOfText = TypeOfText.CONSTANT;
+    } else if (SDBL_SEPARATORS.contains(tokenType)) {
+      typeOfText = TypeOfText.KEYWORD_LIGHT;
+    } else if (SDBL_STRINGS.contains(tokenType)) {
+      typeOfText = TypeOfText.STRING;
+    } else if (SDBL_COMMENTS.contains(tokenType)) {
+      typeOfText = TypeOfText.COMMENT;
+    } else if (SDBL_PARAMETERS.contains(tokenType)) {
+      typeOfText = TypeOfText.ANNOTATION;
+    } else {
+      typeOfText = null;
+    }
+
+    return typeOfText;
+  }
+
+  private static Set<HighlightingData> newHashSet(Integer line) {
+    return new HashSet<>();
+  }
+
+  private static Set<Integer> createBslAnnotations() {
+    return Set.of(
+      BSLLexer.AMPERSAND,
+      BSLLexer.ANNOTATION_AFTER_SYMBOL,
+      BSLLexer.ANNOTATION_AROUND_SYMBOL,
+      BSLLexer.ANNOTATION_ATCLIENT_SYMBOL,
+      BSLLexer.ANNOTATION_ATCLIENTATSERVER_SYMBOL,
+      BSLLexer.ANNOTATION_ATCLIENTATSERVERNOCONTEXT_SYMBOL,
+      BSLLexer.ANNOTATION_ATSERVER_SYMBOL,
+      BSLLexer.ANNOTATION_ATSERVERNOCONTEXT_SYMBOL,
+      BSLLexer.ANNOTATION_BEFORE_SYMBOL,
+      BSLLexer.ANNOTATION_CHANGEANDVALIDATE_SYMBOL,
+      BSLLexer.ANNOTATION_CUSTOM_SYMBOL
+    );
+  }
+
+  private static Set<Integer> createBslPreprocessor() {
+    return Set.of(
+      BSLLexer.HASH,
+      BSLLexer.PREPROC_USE_KEYWORD,
+      BSLLexer.PREPROC_REGION,
+      BSLLexer.PREPROC_END_REGION,
+      BSLLexer.PREPROC_AND_KEYWORD,
+      BSLLexer.PREPROC_OR_KEYWORD,
+      BSLLexer.PREPROC_NOT_KEYWORD,
+      BSLLexer.PREPROC_IF_KEYWORD,
+      BSLLexer.PREPROC_THEN_KEYWORD,
+      BSLLexer.PREPROC_ELSIF_KEYWORD,
+      BSLLexer.PREPROC_ELSE_KEYWORD,
+      BSLLexer.PREPROC_ENDIF_KEYWORD,
+      BSLLexer.PREPROC_EXCLAMATION_MARK,
+      BSLLexer.PREPROC_LPAREN,
+      BSLLexer.PREPROC_RPAREN,
+      BSLLexer.PREPROC_MOBILEAPPCLIENT_SYMBOL,
+      BSLLexer.PREPROC_MOBILEAPPSERVER_SYMBOL,
+      BSLLexer.PREPROC_MOBILECLIENT_SYMBOL,
+      BSLLexer.PREPROC_THICKCLIENTORDINARYAPPLICATION_SYMBOL,
+      BSLLexer.PREPROC_THICKCLIENTMANAGEDAPPLICATION_SYMBOL,
+      BSLLexer.PREPROC_EXTERNALCONNECTION_SYMBOL,
+      BSLLexer.PREPROC_THINCLIENT_SYMBOL,
+      BSLLexer.PREPROC_WEBCLIENT_SYMBOL,
+      BSLLexer.PREPROC_ATCLIENT_SYMBOL,
+      BSLLexer.PREPROC_CLIENT_SYMBOL,
+      BSLLexer.PREPROC_ATSERVER_SYMBOL,
+      BSLLexer.PREPROC_SERVER_SYMBOL,
+      BSLLexer.PREPROC_INSERT_SYMBOL,
+      BSLLexer.PREPROC_ENDINSERT_SYMBOL,
+      BSLLexer.PREPROC_DELETE_SYMBOL,
+      BSLLexer.PREPROC_ENDDELETE_SYMBOL,
+      BSLLexer.PREPROC_IDENTIFIER,
+      BSLLexer.PREPROC_ANY
+    );
+  }
+
+  private static Set<Integer> createBslComments() {
+    return Set.of(
+      BSLLexer.LINE_COMMENT,
+      BSLLexer.PREPROC_LINE_COMMENT
+    );
+  }
+
+  private static Set<Integer> createBslStrings() {
+    return Set.of(
+      BSLLexer.STRING,
+      BSLLexer.STRINGSTART,
+      BSLLexer.STRINGPART,
+      BSLLexer.STRINGTAIL,
+      BSLLexer.PREPROC_STRING,
+      BSLLexer.PREPROC_STRINGSTART,
+      BSLLexer.PREPROC_STRINGTAIL,
+      BSLLexer.PREPROC_STRINGPART
+    );
+  }
+
+  private static Set<Integer> createBslLiterals() {
+    return Set.of(
+      BSLLexer.TRUE,
+      BSLLexer.FALSE,
+      BSLLexer.UNDEFINED,
+      BSLLexer.NULL,
+      BSLLexer.DATETIME,
+      BSLLexer.DECIMAL,
+      BSLLexer.FLOAT
+    );
+  }
+
+  private static Set<Integer> createBslSeparators() {
+    return Set.of(
+      BSLLexer.SEMICOLON,
+      BSLLexer.QUESTION,
+      BSLLexer.PLUS,
+      BSLLexer.MINUS,
+      BSLLexer.MUL,
+      BSLLexer.QUOTIENT,
+      BSLLexer.MODULO,
+      BSLLexer.ASSIGN,
+      BSLLexer.LESS_OR_EQUAL,
+      BSLLexer.LESS,
+      BSLLexer.NOT_EQUAL,
+      BSLLexer.GREATER_OR_EQUAL,
+      BSLLexer.GREATER,
+      BSLLexer.COMMA,
+      BSLLexer.COLON,
+      BSLLexer.TILDA
+    );
+  }
+
+  private static Set<Integer> createBslKeywords() {
+    return Set.of(
+      BSLLexer.PROCEDURE_KEYWORD,
+      BSLLexer.FUNCTION_KEYWORD,
+      BSLLexer.ENDPROCEDURE_KEYWORD,
+      BSLLexer.ENDFUNCTION_KEYWORD,
+      BSLLexer.EXPORT_KEYWORD,
+      BSLLexer.VAL_KEYWORD,
+      BSLLexer.ENDIF_KEYWORD,
+      BSLLexer.ENDDO_KEYWORD,
+      BSLLexer.IF_KEYWORD,
+      BSLLexer.ELSIF_KEYWORD,
+      BSLLexer.ELSE_KEYWORD,
+      BSLLexer.THEN_KEYWORD,
+      BSLLexer.WHILE_KEYWORD,
+      BSLLexer.DO_KEYWORD,
+      BSLLexer.FOR_KEYWORD,
+      BSLLexer.TO_KEYWORD,
+      BSLLexer.EACH_KEYWORD,
+      BSLLexer.IN_KEYWORD,
+      BSLLexer.TRY_KEYWORD,
+      BSLLexer.EXCEPT_KEYWORD,
+      BSLLexer.ENDTRY_KEYWORD,
+      BSLLexer.RETURN_KEYWORD,
+      BSLLexer.CONTINUE_KEYWORD,
+      BSLLexer.RAISE_KEYWORD,
+      BSLLexer.VAR_KEYWORD,
+      BSLLexer.NOT_KEYWORD,
+      BSLLexer.OR_KEYWORD,
+      BSLLexer.AND_KEYWORD,
+      BSLLexer.NEW_KEYWORD,
+      BSLLexer.GOTO_KEYWORD,
+      BSLLexer.BREAK_KEYWORD,
+      BSLLexer.EXECUTE_KEYWORD,
+      BSLLexer.ADDHANDLER_KEYWORD,
+      BSLLexer.REMOVEHANDLER_KEYWORD
+    );
+  }
+
+  private static Set<Integer> createSdblSeparators() {
+    return Set.of(
+      SDBLLexer.SEMICOLON,
+      SDBLLexer.PLUS,
+      SDBLLexer.MINUS,
+      SDBLLexer.MUL,
+      SDBLLexer.QUOTIENT,
+      SDBLLexer.ASSIGN,
+      SDBLLexer.LESS_OR_EQUAL,
+      SDBLLexer.LESS,
+      SDBLLexer.NOT_EQUAL,
+      SDBLLexer.GREATER_OR_EQUAL,
+      SDBLLexer.GREATER,
+      SDBLLexer.COMMA,
+      SDBLLexer.BRACE,
+      SDBLLexer.BRACE_START
+    );
+  }
+
+  private static Set<Integer> createSdblLiterals() {
+    return Set.of(
+      SDBLLexer.TRUE,
+      SDBLLexer.FALSE,
+      SDBLLexer.UNDEFINED,
+      SDBLLexer.NULL,
+      SDBLLexer.DECIMAL,
+      SDBLLexer.FLOAT
+    );
+  }
+
+  private static Set<Integer> createSdblVirtualTables() {
+    return Set.of(
+      SDBLLexer.ACTUAL_ACTION_PERIOD_VT,
+      SDBLLexer.BALANCE_VT,
+      SDBLLexer.BALANCE_AND_TURNOVERS_VT,
+      SDBLLexer.BOUNDARIES_VT,
+      SDBLLexer.DR_CR_TURNOVERS_VT,
+      SDBLLexer.EXT_DIMENSIONS_VT,
+      SDBLLexer.RECORDS_WITH_EXT_DIMENSIONS_VT,
+      SDBLLexer.SCHEDULE_DATA_VT,
+      SDBLLexer.SLICEFIRST_VT,
+      SDBLLexer.SLICELAST_VT,
+      SDBLLexer.TASK_BY_PERFORMER_VT,
+      SDBLLexer.TURNOVERS_VT
+    );
+  }
+
+  private static Set<Integer> createSdblMetadataTypes() {
+    return Set.of(
+      SDBLLexer.ACCOUNTING_REGISTER_TYPE,
+      SDBLLexer.ACCUMULATION_REGISTER_TYPE,
+      SDBLLexer.BUSINESS_PROCESS_TYPE,
+      SDBLLexer.CALCULATION_REGISTER_TYPE,
+      SDBLLexer.CATALOG_TYPE,
+      SDBLLexer.CHART_OF_ACCOUNTS_TYPE,
+      SDBLLexer.CHART_OF_CALCULATION_TYPES_TYPE,
+      SDBLLexer.CHART_OF_CHARACTERISTIC_TYPES_TYPE,
+      SDBLLexer.CONSTANT_TYPE,
+      SDBLLexer.DOCUMENT_TYPE,
+      SDBLLexer.DOCUMENT_JOURNAL_TYPE,
+      SDBLLexer.ENUM_TYPE,
+      SDBLLexer.EXCHANGE_PLAN_TYPE,
+      SDBLLexer.EXTERNAL_DATA_SOURCE_TYPE,
+      SDBLLexer.FILTER_CRITERION_TYPE,
+      SDBLLexer.INFORMATION_REGISTER_TYPE,
+      SDBLLexer.SEQUENCE_TYPE,
+      SDBLLexer.TASK_TYPE
+    );
+  }
+
+  private static Set<Integer> createSdblFunctions() {
+    return Set.of(
+      SDBLLexer.AVG,
+      SDBLLexer.BEGINOFPERIOD,
+      SDBLLexer.BOOLEAN,
+      SDBLLexer.COUNT,
+      SDBLLexer.DATE,
+      SDBLLexer.DATEADD,
+      SDBLLexer.DATEDIFF,
+      SDBLLexer.DATETIME,
+      SDBLLexer.DAY,
+      SDBLLexer.DAYOFYEAR,
+      SDBLLexer.EMPTYTABLE,
+      SDBLLexer.ENDOFPERIOD,
+      SDBLLexer.HALFYEAR,
+      SDBLLexer.HOUR,
+      SDBLLexer.MAX,
+      SDBLLexer.MIN,
+      SDBLLexer.MINUTE,
+      SDBLLexer.MONTH,
+      SDBLLexer.NUMBER,
+      SDBLLexer.QUARTER,
+      SDBLLexer.PRESENTATION,
+      SDBLLexer.RECORDAUTONUMBER,
+      SDBLLexer.REFPRESENTATION,
+      SDBLLexer.SECOND,
+      SDBLLexer.STRING,
+      SDBLLexer.SUBSTRING,
+      SDBLLexer.SUM,
+      SDBLLexer.TENDAYS,
+      SDBLLexer.TYPE,
+      SDBLLexer.VALUE,
+      SDBLLexer.VALUETYPE,
+      SDBLLexer.WEEK,
+      SDBLLexer.WEEKDAY,
+      SDBLLexer.YEAR
+    );
+  }
+
+  private static Set<Integer> createSdblKeywords() {
+    return Set.of(
+      SDBLLexer.ALL,
+      SDBLLexer.ALLOWED,
+      SDBLLexer.AND,
+      SDBLLexer.AS,
+      SDBLLexer.ASC,
+      SDBLLexer.AUTOORDER,
+      SDBLLexer.BETWEEN,
+      SDBLLexer.BY_EN,
+      SDBLLexer.CASE,
+      SDBLLexer.CAST,
+      SDBLLexer.DESC,
+      SDBLLexer.DISTINCT,
+      SDBLLexer.DROP,
+      SDBLLexer.ELSE,
+      SDBLLexer.END,
+      SDBLLexer.ESCAPE,
+      SDBLLexer.FALSE,
+      SDBLLexer.FOR,
+      SDBLLexer.FROM,
+      SDBLLexer.FULL,
+      SDBLLexer.GROUP,
+      SDBLLexer.HAVING,
+      SDBLLexer.HIERARCHY_EN,
+      SDBLLexer.HIERARCHII_RU,
+      SDBLLexer.HIERARCHYA_RU,
+      SDBLLexer.IN,
+      SDBLLexer.INDEX,
+      SDBLLexer.INNER,
+      SDBLLexer.INTO,
+      SDBLLexer.IS,
+      SDBLLexer.ISNULL,
+      SDBLLexer.JOIN,
+      SDBLLexer.LEFT,
+      SDBLLexer.LIKE,
+      SDBLLexer.NOT,
+      SDBLLexer.OF,
+      SDBLLexer.ON_EN,
+      SDBLLexer.OR,
+      SDBLLexer.ORDER,
+      SDBLLexer.OUTER,
+      SDBLLexer.OVERALL,
+      SDBLLexer.PO_RU,
+      SDBLLexer.RIGHT,
+      SDBLLexer.SELECT,
+      SDBLLexer.THEN,
+      SDBLLexer.TOP,
+      SDBLLexer.TOTALS,
+      SDBLLexer.UNION,
+      SDBLLexer.WHEN,
+      SDBLLexer.WHERE,
+      SDBLLexer.ONLY,
+      SDBLLexer.PERIODS,
+      SDBLLexer.REFS,
+      SDBLLexer.UPDATE
+    );
+  }
+
+  private static Set<Integer> createSdblStrings() {
+    return Set.of(
+      SDBLLexer.STR
+    );
+  }
+
+  private static Set<Integer> createSdblComments() {
+    return Set.of(
+      SDBLLexer.LINE_COMMENT
+    );
+  }
+
+  private static Set<Integer> createSdblParameters() {
+    return Set.of(
+      SDBLLexer.AMPERSAND,
+      SDBLLexer.PARAMETER_IDENTIFIER
+    );
+  }
+
+  @Data
+  @RequiredArgsConstructor
+  @EqualsAndHashCode(exclude = "active")
+  private static class HighlightingData {
+    private final Range range;
+    private final TypeOfText type;
+    private boolean active = true;
+
+    public HighlightingData(int line, int startChar, int endChar, TypeOfText type) {
+      this(Ranges.create(line, startChar, endChar), type);
+    }
+  }
+}
diff --git a/src/test/java/com/github/_1c_syntax/bsl/sonar/BSLCoreSensorTest.java b/src/test/java/com/github/_1c_syntax/bsl/sonar/BSLCoreSensorTest.java
index 416599a7..e564dcb3 100644
--- a/src/test/java/com/github/_1c_syntax/bsl/sonar/BSLCoreSensorTest.java
+++ b/src/test/java/com/github/_1c_syntax/bsl/sonar/BSLCoreSensorTest.java
@@ -40,6 +40,7 @@
 import org.sonar.api.utils.Version;
 
 import java.io.File;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Path;
 
 import static org.assertj.core.api.Assertions.assertThat;
@@ -231,10 +232,10 @@ private NewActiveRule newActiveRule(String diagnosticName) {
       .build();
   }
 
-
   private SensorContextTester createSensorContext() {
     SonarRuntime sonarRuntime = SonarRuntimeImpl.forSonarLint(SONAR_VERSION);
     SensorContextTester context = SensorContextTester.create(BASE_DIR);
+    context.fileSystem().setEncoding(StandardCharsets.UTF_8);
     context.setRuntime(sonarRuntime);
 
     InputFile inputFile = Tools.inputFileBSL(FILE_NAME, BASE_DIR);
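Editor's note: the trickiest part of BSLHighlighter.saveHighlighting above is splitting a BSL string token around the SDBL tokens found inside it, so that query keywords keep their own colour while the surrounding pieces of the literal stay highlighted as STRING. The self-contained sketch below reproduces that interval arithmetic with plain integers; the Span class and method names are hypothetical stand-ins for the patch's HighlightingData, not part of the patch.

  import java.util.ArrayList;
  import java.util.Comparator;
  import java.util.List;

  final class StringSplitExample {

    // Hypothetical stand-in for a highlighting range on a single line: [startChar, endChar).
    static final class Span {
      final int startChar;
      final int endChar;
      Span(int startChar, int endChar) { this.startChar = startChar; this.endChar = endChar; }
      @Override public String toString() { return "[" + startChar + ", " + endChar + ")"; }
    }

    // Returns the parts of `string` that are NOT covered by any embedded span,
    // mirroring the splitting loop in BSLHighlighter.saveHighlighting.
    static List<Span> splitAround(Span string, List<Span> embedded) {
      List<Span> sorted = new ArrayList<>(embedded);
      sorted.sort(Comparator.comparingInt(span -> span.startChar));

      List<Span> parts = new ArrayList<>();
      int endChar = string.startChar;
      for (Span token : sorted) {
        int startChar = endChar;
        endChar = token.startChar;
        if (startChar < endChar) {
          parts.add(new Span(startChar, endChar)); // string part before the query token
        }
        endChar = token.endChar;                   // skip over the query token itself
      }
      if (endChar < string.endChar) {
        parts.add(new Span(endChar, string.endChar)); // trailing string part
      }
      return parts;
    }

    public static void main(String[] args) {
      Span bslString = new Span(4, 40);
      List<Span> sdblTokens = List.of(new Span(5, 12), new Span(13, 24));
      // Prints the three leftover string parts: [4, 5), [12, 13), [24, 40)
      System.out.println(splitAround(bslString, sdblTokens));
    }
  }

The original BSL string token is then deactivated (setActive(false)), and only the leftover parts plus the SDBL tokens are written to NewHighlighting, so no two saved ranges overlap.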
diff --git a/src/test/java/com/github/_1c_syntax/bsl/sonar/BSLHighlighterTest.java b/src/test/java/com/github/_1c_syntax/bsl/sonar/BSLHighlighterTest.java
new file mode 100644
index 00000000..fc34fe72
--- /dev/null
+++ b/src/test/java/com/github/_1c_syntax/bsl/sonar/BSLHighlighterTest.java
@@ -0,0 +1,502 @@
+/*
+ * This file is a part of SonarQube 1C (BSL) Community Plugin.
+ *
+ * Copyright © 2018-2020
+ * Alexey Sosnoviy, Nikita Gryzlov
+ *
+ * SPDX-License-Identifier: LGPL-3.0-or-later
+ *
+ * SonarQube 1C (BSL) Community Plugin is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 3.0 of the License, or (at your option) any later version.
+ *
+ * SonarQube 1C (BSL) Community Plugin is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with SonarQube 1C (BSL) Community Plugin.
+ */
+package com.github._1c_syntax.bsl.sonar;
+
+import com.github._1c_syntax.bsl.languageserver.context.DocumentContext;
+import com.github._1c_syntax.bsl.parser.BSLLexer;
+import com.github._1c_syntax.bsl.parser.SDBLLexer;
+import com.github._1c_syntax.bsl.parser.SDBLTokenizer;
+import org.antlr.v4.runtime.CommonToken;
+import org.antlr.v4.runtime.Token;
+import org.antlr.v4.runtime.Vocabulary;
+import org.junit.jupiter.api.Test;
+import org.sonar.api.batch.fs.InputFile;
+import org.sonar.api.batch.sensor.highlighting.TypeOfText;
+import org.sonar.api.batch.sensor.internal.SensorContextTester;
+
+import java.io.File;
+import java.net.URI;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+class BSLHighlighterTest {
+
+  private final String BASE_PATH = "src/test/resources/src";
+  private final File BASE_DIR = new File(BASE_PATH).getAbsoluteFile();
+  private final String FILE_NAME = "test.bsl";
+
+  private SensorContextTester context;
+  private BSLHighlighter highlighter;
+  private DocumentContext documentContext;
+  private InputFile inputFile;
+
+  @Test
+  void testHighlightingBSL() {
+
+    // given
+    Vocabulary vocabulary = BSLLexer.VOCABULARY;
+    Map<String, TypeOfText> highlightingMap = getHighlightingMapBSL(vocabulary);
+
+    // then
+    testHighlighting(vocabulary, highlightingMap);
+  }
+
+  @Test
+  void testHighlightingSDBL() {
+    // given
+    Vocabulary vocabulary = SDBLLexer.VOCABULARY;
+    Map<String, TypeOfText> highlightingMap = getHighlightingMapSDBL(vocabulary);
+
+    // then
+    testHighlighting(vocabulary, highlightingMap);
+  }
+
+  @Test
+  void testMergeHighlightingTokens() {
+    // given
+    context = SensorContextTester.create(Path.of("."));
+    highlighter = new BSLHighlighter(context);
+    String content = "А = \"ВЫБРАТЬ РАЗРЕШЕННЫЕ Поле.Один \n" +
+      "|КАК \n" +
+      "| Один, 2 \n" +
+      " | КАК Два ИЗ Справочник.Поле\n" +
+      "|АВТОУПОРЯДОЧИВАНИЕ;\";";
+    documentContext = new DocumentContext(URI.create("file:///fake.bsl"), content, null, null);
+    documentContext.rebuild(content);
+
+    inputFile = Tools.inputFileBSL(FILE_NAME, BASE_DIR, content);
+
+    // when
+    highlighter.saveHighlighting(inputFile, documentContext);
+
+    // then
+    String componentKey = "moduleKey:" + FILE_NAME;
+
+    checkTokenTypeAtPosition(componentKey, 1, 4, TypeOfText.STRING);
+    checkTokenTypeAtPosition(componentKey, 1, 5, TypeOfText.KEYWORD);
+    checkTokenTypeAtPosition(componentKey, 1, 6, TypeOfText.KEYWORD);
+    checkTokenTypeAtPosition(componentKey, 1, 12, TypeOfText.STRING);
+    checkTokenTypeAtPosition(componentKey, 1, 13, TypeOfText.KEYWORD);
+    checkTokenTypeAtPosition(componentKey, 1, 25, TypeOfText.STRING);
+
+    checkTokenTypeAtPosition(componentKey, 2, 0, TypeOfText.STRING);
+    checkTokenTypeAtPosition(componentKey, 2, 1, TypeOfText.KEYWORD);
+    checkTokenTypeAtPosition(componentKey, 2, 2, TypeOfText.KEYWORD);
+
+    checkTokenTypeAtPosition(componentKey, 3, 0, TypeOfText.STRING);
+    checkTokenTypeAtPosition(componentKey, 3, 1, TypeOfText.STRING);
+    checkTokenTypeAtPosition(componentKey, 3, 5, TypeOfText.STRING);
+    checkTokenTypeAtPosition(componentKey, 3, 9, TypeOfText.CONSTANT);
+
+    checkTokenTypeAtPosition(componentKey, 4, 1, TypeOfText.STRING);
+    checkTokenTypeAtPosition(componentKey, 4, 2, TypeOfText.STRING);
+    checkTokenTypeAtPosition(componentKey, 4, 6, TypeOfText.KEYWORD);
+    checkTokenTypeAtPosition(componentKey, 4, 10, TypeOfText.STRING);
+    checkTokenTypeAtPosition(componentKey, 4, 13, TypeOfText.KEYWORD);
+    checkTokenTypeAtPosition(componentKey, 4, 16, TypeOfText.KEYWORD_LIGHT);
+
+    checkTokenTypeAtPosition(componentKey, 5, 0, TypeOfText.STRING);
+    checkTokenTypeAtPosition(componentKey, 5, 1, TypeOfText.KEYWORD);
+    checkTokenTypeAtPosition(componentKey, 5, 18, TypeOfText.KEYWORD);
+    checkTokenTypeAtPosition(componentKey, 5, 19, TypeOfText.KEYWORD_LIGHT);
+    checkTokenTypeAtPosition(componentKey, 5, 20, TypeOfText.STRING);
+    checkTokenTypeAtPosition(componentKey, 5, 21, TypeOfText.KEYWORD_LIGHT);
+
+  }
+
+  private void testHighlighting(Vocabulary vocabulary, Map<String, TypeOfText> highlightingMap) {
+    // given
+    initContext(vocabulary);
+
+    // when
+    highlighter.saveHighlighting(inputFile, documentContext);
+
+    // then
+    checkHighlighting(vocabulary, context, highlightingMap);
+  }
+
+  private void checkTokenTypeAtPosition(String componentKey, int line, int character, TypeOfText typeOfText) {
+    List<TypeOfText> typeOfTexts = context.highlightingTypeAt(componentKey, line, character);
+    assertThat(typeOfTexts)
+      .as("Position %d:%d should have typeOfText %s", line, character, typeOfText)
+      .contains(typeOfText);
+  }
+
+  private void initContext(Vocabulary vocabulary) {
+    context = SensorContextTester.create(Path.of("."));
+    highlighter = new BSLHighlighter(context);
+    documentContext = mock(DocumentContext.class);
+    List<Token> tokens = new ArrayList<>();
+
+    int maxTokenType = vocabulary.getMaxTokenType();
+    for (var tokenType = 1; tokenType <= maxTokenType; tokenType++) {
+      var token = new CommonToken(tokenType, "a");
+      token.setLine(1);
+      token.setCharPositionInLine(tokenType - 1);
+      tokens.add(token);
+    }
+
+    String content = tokens.stream()
+      .map(Token::getText)
+      .collect(Collectors.joining());
+
+    if (vocabulary.equals(SDBLLexer.VOCABULARY)) {
+      SDBLTokenizer sdblTokenizer = mock(SDBLTokenizer.class);
+      when(sdblTokenizer.getTokens()).thenReturn(tokens);
+      when(documentContext.getQueries()).thenReturn(List.of(sdblTokenizer));
+    } else {
+      when(documentContext.getTokens()).thenReturn(tokens);
+    }
+    inputFile = Tools.inputFileBSL(FILE_NAME, BASE_DIR, content);
+  }
+
+  private void checkHighlighting(Vocabulary vocabulary, SensorContextTester context, Map<String, TypeOfText> highlightingMap) {
+    int maxTokenType = vocabulary.getMaxTokenType();
+    String componentKey = "moduleKey:" + FILE_NAME;
+
+    assertThat(IntStream.range(1, maxTokenType))
+      .isNotEmpty()
+      .allSatisfy(tokenType -> {
+        String symbolicTokenName = vocabulary.getSymbolicName(tokenType);
+        // no need to check lexer fragments or invisible names.
+        if (symbolicTokenName == null) {
+          return;
+        }
+        TypeOfText typeOfText = highlightingMap.get(symbolicTokenName);
+        if (typeOfText == null) {
+          return;
+        }
+
+        List<TypeOfText> typeOfTexts = context.highlightingTypeAt(componentKey, 1, tokenType - 1);
+        assertThat(typeOfTexts)
+          .as("Symbolic token name %s should map to typeOfText %s", symbolicTokenName, typeOfText)
+          .contains(typeOfText);
+      });
+  }
+
+  private Map<String, TypeOfText> getHighlightingMapBSL(Vocabulary vocabulary) {
+
+    Set<String> literals = Set.of(
+      "TRUE",
+      "FALSE",
+      "UNDEFINED",
+      "NULL",
+      "DATETIME",
+      "DECIMAL",
+      "FLOAT"
+    );
+
+    Set<String> punctuators = Set.of(
+      "SEMICOLON",
+      "QUESTION",
+      "PLUS",
+      "MINUS",
+      "MUL",
+      "QUOTIENT",
+      "MODULO",
+      "ASSIGN",
+      "LESS_OR_EQUAL",
+      "LESS",
+      "NOT_EQUAL",
+      "GREATER_OR_EQUAL",
+      "GREATER",
+      "COMMA",
+      "COLON",
+      "TILDA"
+    );
+
+    Set<String> noOpTypes = Set.of(
+      "WHITE_SPACE",
+      "DOT",
+      "LBRACK",
+      "RBRACK",
+      "LPAREN",
+      "RPAREN",
+      "SQUOTE",
+      "IDENTIFIER",
+      "UNKNOWN",
+      "PREPROC_NEWLINE",
+      "BAR"
+    );
+
+    int maxTokenType = vocabulary.getMaxTokenType();
+
+    Map<String, TypeOfText> highlightingMap = new HashMap<>();
+    for (int tokenType = 1; tokenType <= maxTokenType; tokenType++) {
+      String ruleName = vocabulary.getSymbolicName(tokenType);
+      // no need to check lexer fragments or invisible names.
+      if (ruleName == null) {
+        continue;
+      }
+
+      TypeOfText typeOfText = null;
+      if (noOpTypes.contains(ruleName)) {
+        continue;
+      } else if (ruleName.endsWith("_KEYWORD") && !ruleName.startsWith("PREPROC_")) {
+        typeOfText = TypeOfText.KEYWORD;
+      } else if (literals.contains(ruleName)) {
+        typeOfText = TypeOfText.CONSTANT;
+      } else if (punctuators.contains(ruleName)) {
+        typeOfText = TypeOfText.KEYWORD_LIGHT;
+      } else if (ruleName.contains("STRING")) {
+        typeOfText = TypeOfText.STRING;
+      } else if (ruleName.contains("LINE_COMMENT")) {
+        typeOfText = TypeOfText.COMMENT;
+      } else if (ruleName.equals("AMPERSAND") || ruleName.contains("ANNOTATION_")) {
+        typeOfText = TypeOfText.ANNOTATION;
+      } else if (ruleName.equals("HASH") || ruleName.contains("PREPROC_")) {
+        typeOfText = TypeOfText.PREPROCESS_DIRECTIVE;
+      }
+
+      if (typeOfText == null) {
+        throw new IllegalArgumentException("Unknown type of text for lexer rule name: " + ruleName);
+      }
+
+      highlightingMap.put(ruleName, typeOfText);
+    }
+    return highlightingMap;
+  }
+
+  private Map<String, TypeOfText> getHighlightingMapSDBL(Vocabulary vocabulary) {
+
+    Set<String> keywords = Set.of(
+      "ALL",
+      "ALLOWED",
+      "AND",
+      "AS",
+      "ASC",
+      "AUTOORDER",
+      "BETWEEN",
+      "BY_EN",
+      "CASE",
+      "CAST",
+      "DESC",
+      "DISTINCT",
+      "DROP",
+      "ELSE",
+      "END",
+      "ESCAPE",
+      "FALSE",
+      "FOR",
+      "FROM",
+      "FULL",
+      "GROUP",
+      "HAVING",
+      "HIERARCHY_EN",
+      "HIERARCHII_RU",
+      "HIERARCHYA_RU",
+      "IN",
+      "INDEX",
+      "INNER",
+      "INTO",
+      "IS",
+      "ISNULL",
+      "JOIN",
+      "LEFT",
+      "LIKE",
+      "NOT",
+      "OF",
+      "ON_EN",
+      "OR",
+      "ORDER",
+      "OUTER",
+      "OVERALL",
+      "PO_RU",
+      "RIGHT",
+      "SELECT",
+      "THEN",
+      "TOP",
+      "TOTALS",
+      "UNION",
+      "WHEN",
+      "WHERE",
+      "ONLY",
+      "PERIODS",
+      "REFS",
+      "UPDATE"
+    );
+
+    Set<String> functions = Set.of(
+      "AVG",
+      "BEGINOFPERIOD",
+      "BOOLEAN",
+      "COUNT",
+      "DATE",
+      "DATEADD",
+      "DATEDIFF",
+      "DATETIME",
+      "DAY",
+      "DAYOFYEAR",
+      "EMPTYTABLE",
+      "ENDOFPERIOD",
+      "HALFYEAR",
+      "HOUR",
+      "MAX",
+      "MIN",
+      "MINUTE",
+      "MONTH",
+      "NUMBER",
+      "QUARTER",
+      "PRESENTATION",
+      "RECORDAUTONUMBER",
+      "REFPRESENTATION",
+      "SECOND",
+      "STRING",
+      "SUBSTRING",
+      "SUM",
+      "TENDAYS",
+      "TYPE",
+      "VALUE",
+      "VALUETYPE",
+      "WEEK",
+      "WEEKDAY",
+      "YEAR"
+    );
+
+    Set<String> metadataTypes = Set.of(
+      "ACCOUNTING_REGISTER_TYPE",
+      "ACCUMULATION_REGISTER_TYPE",
+      "BUSINESS_PROCESS_TYPE",
+      "CALCULATION_REGISTER_TYPE",
+      "CATALOG_TYPE",
+      "CHART_OF_ACCOUNTS_TYPE",
+      "CHART_OF_CALCULATION_TYPES_TYPE",
+      "CHART_OF_CHARACTERISTIC_TYPES_TYPE",
+      "CONSTANT_TYPE",
+      "DOCUMENT_TYPE",
+      "DOCUMENT_JOURNAL_TYPE",
+      "ENUM_TYPE",
+      "EXCHANGE_PLAN_TYPE",
+      "EXTERNAL_DATA_SOURCE_TYPE",
+      "FILTER_CRITERION_TYPE",
+      "INFORMATION_REGISTER_TYPE",
+      "SEQUENCE_TYPE",
+      "TASK_TYPE"
+    );
+
+    Set<String> virtualTables = Set.of(
+      "ACTUAL_ACTION_PERIOD_VT",
+      "BALANCE_VT",
+      "BALANCE_AND_TURNOVERS_VT",
+      "BOUNDARIES_VT",
+      "DR_CR_TURNOVERS_VT",
+      "EXT_DIMENSIONS_VT",
+      "RECORDS_WITH_EXT_DIMENSIONS_VT",
+      "SCHEDULE_DATA_VT",
+      "SLICEFIRST_VT",
+      "SLICELAST_VT",
+      "TASK_BY_PERFORMER_VT",
+      "TURNOVERS_VT"
+    );
+
+    Set<String> literals = Set.of(
+      "TRUE",
+      "FALSE",
+      "UNDEFINED",
+      "NULL",
+      "DECIMAL",
+      "FLOAT"
+    );
+
+    Set<String> separators = Set.of(
+      "SEMICOLON",
+      "PLUS",
+      "MINUS",
+      "MUL",
+      "QUOTIENT",
+      "ASSIGN",
+      "LESS_OR_EQUAL",
+      "LESS",
+      "NOT_EQUAL",
+      "GREATER_OR_EQUAL",
+      "GREATER",
+      "COMMA",
+      "BRACE",
+      "BRACE_START"
+    );
+
+    Set<String> noOpTypes = Set.of(
+      "WHITE_SPACE",
+      "DOT",
+      "LPAREN",
+      "RPAREN",
+      "ROUTEPOINT_FIELD",
+      "IDENTIFIER",
+      "INCORRECT_IDENTIFIER",
+      "BRACE_IDENTIFIER",
+      "UNKNOWN",
+      "BAR" // TODO: remove from the lexer
+    );
+
+    int maxTokenType = vocabulary.getMaxTokenType();
+
+    Map<String, TypeOfText> highlightingMap = new HashMap<>();
+    for (int tokenType = 1; tokenType <= maxTokenType; tokenType++) {
+      String ruleName = vocabulary.getSymbolicName(tokenType);
+      // no need to check lexer fragments or invisible names.
+      if (ruleName == null) {
+        continue;
+      }
+
+      TypeOfText typeOfText = null;
+      if (noOpTypes.contains(ruleName)) {
+        continue;
+      } else if (keywords.contains(ruleName)) {
+        typeOfText = TypeOfText.KEYWORD;
+      } else if (literals.contains(ruleName)) {
+        typeOfText = TypeOfText.CONSTANT;
+      } else if (separators.contains(ruleName)) {
+        typeOfText = TypeOfText.KEYWORD_LIGHT;
+      } else if (functions.contains(ruleName)) {
+        typeOfText = TypeOfText.KEYWORD_LIGHT;
+      } else if (metadataTypes.contains(ruleName)) {
+        typeOfText = TypeOfText.KEYWORD_LIGHT;
+      } else if (virtualTables.contains(ruleName)) {
+        typeOfText = TypeOfText.KEYWORD_LIGHT;
+      } else if (ruleName.equals("STR")) {
+        typeOfText = TypeOfText.STRING;
+      } else if (ruleName.contains("LINE_COMMENT")) {
+        typeOfText = TypeOfText.COMMENT;
+      } else if (ruleName.equals("AMPERSAND") || ruleName.equals("PARAMETER_IDENTIFIER")) {
+        typeOfText = TypeOfText.ANNOTATION;
+      }
+
+      if (typeOfText == null) {
+        throw new IllegalArgumentException("Unknown type of text for lexer rule name: " + ruleName);
+      }
+
+      highlightingMap.put(ruleName, typeOfText);
+    }
+    return highlightingMap;
+  }
+
+}
diff --git a/src/test/java/com/github/_1c_syntax/bsl/sonar/Tools.java b/src/test/java/com/github/_1c_syntax/bsl/sonar/Tools.java
index e1b56e3e..dd107a6a 100644
--- a/src/test/java/com/github/_1c_syntax/bsl/sonar/Tools.java
+++ b/src/test/java/com/github/_1c_syntax/bsl/sonar/Tools.java
@@ -23,38 +23,36 @@
 import com.github._1c_syntax.bsl.sonar.language.BSLLanguage;
 import org.sonar.api.batch.fs.InputFile;
-import org.sonar.api.batch.fs.internal.DefaultInputFile;
 import org.sonar.api.batch.fs.internal.TestInputFileBuilder;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
-import java.nio.file.Paths;
 
 public class Tools {
 
-  public static InputFile inputFileBSL(String name, File baseDir) {
+  public static InputFile inputFileBSL(String name, File baseDir, String content) {
+    return TestInputFileBuilder.create("moduleKey", name)
+      .setModuleBaseDir(baseDir.toPath())
+      .setType(InputFile.Type.MAIN)
+      .setLanguage(BSLLanguage.KEY)
+      .setCharset(StandardCharsets.UTF_8)
+      .setContents(content)
+      .initMetadata(content)
+      .build();
+  }
 
-    File file = new File(baseDir.getAbsoluteFile(), name);
-    String content;
-    try {
-      content = readFile(file.toPath().toString());
-    } catch (IOException e) {
-      content = "Значение = 1; Значение2 = 1;";
-    }
+  public static InputFile inputFileBSL(String name, File baseDir) {
 
-    DefaultInputFile inputFile = TestInputFileBuilder.create("moduleKey", name)
-      .setModuleBaseDir(baseDir.toPath())
-      //.setCharset(StandardCharsets.UTF_8)
-      .setType(InputFile.Type.MAIN)
-      .setLanguage(BSLLanguage.KEY)
-      .initMetadata(content)
-      .build();
-    return inputFile;
+    File file = new File(baseDir.getAbsoluteFile(), name);
+    String content;
+    try {
+      content = Files.readString(file.toPath(), StandardCharsets.UTF_8);
+    } catch (IOException e) {
+      content = "Значение = 1; Значение2 = 1;";
     }
 
-  private static String readFile(String path) throws IOException {
-    byte[] encoded = Files.readAllBytes(Paths.get(path));
-    return new String(encoded);
-  }
+    return inputFileBSL(name, baseDir, content);
+  }
 }
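Editor's note: the new Tools.inputFileBSL overload is what lets BSLHighlighterTest feed synthetic content to the sensor context without touching the disk. A minimal usage sketch, assuming the project's test classpath; the class name, file name, and BSL snippet below are illustrative only.

  import com.github._1c_syntax.bsl.sonar.Tools;
  import org.sonar.api.batch.fs.InputFile;

  import java.io.File;

  class ToolsUsageExample {

    // Hypothetical example: content is passed in directly, so the InputFile never has to
    // exist on disk, and its charset is always UTF-8 regardless of the host platform.
    static InputFile inMemoryFile() {
      return Tools.inputFileBSL(
        "inMemory.bsl",
        new File("src/test/resources/src"),
        "Процедура Тест() КонецПроцедуры"
      );
    }
  }

The explicit UTF-8 charset here, together with context.fileSystem().setEncoding(StandardCharsets.UTF_8) in BSLCoreSensorTest, keeps the Cyrillic test fixtures stable across platforms with different default encodings.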