diff --git a/LanguageServer/src/main/java/org/nittalab/dtram/languageserver/DTRAMTextDocumentService.java b/LanguageServer/src/main/java/org/nittalab/dtram/languageserver/DTRAMTextDocumentService.java
index 995164b..adfcb25 100644
--- a/LanguageServer/src/main/java/org/nittalab/dtram/languageserver/DTRAMTextDocumentService.java
+++ b/LanguageServer/src/main/java/org/nittalab/dtram/languageserver/DTRAMTextDocumentService.java
@@ -10,12 +10,11 @@
 import org.nittalab.dtram.languageserver.utils.SemanticAnalyzer;
 import org.nittalab.dtram.languageserver.utils.Tokenizer;
 
-import java.io.BufferedReader;
 import java.io.IOException;
 import java.net.URI;
+import java.net.URISyntaxException;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
@@ -26,12 +25,20 @@
      */
     private List<String> supportedTokenTypes = new ArrayList<>();
 
+    /**
+     * Current source code in the editor.
+     */
+    private String sourceText = null;
+
+
     @Override
     public void didOpen(DidOpenTextDocumentParams params) {
+        sourceText = params.getTextDocument().getText();
     }
 
     @Override
     public void didChange(DidChangeTextDocumentParams params) {
+        sourceText = params.getContentChanges().getFirst().getText();
     }
 
     @Override
@@ -40,6 +47,7 @@
 
     @Override
     public void didSave(DidSaveTextDocumentParams params) {
+        sourceText = params.getText();
     }
 
     @Override
@@ -57,58 +65,55 @@
     @Override
     public CompletableFuture<SemanticTokens> semanticTokensFull(SemanticTokensParams params) {
-        return CompletableFutures.computeAsync(checker -> {
-            /* We don't have to do unnecessary jobs */
-            if (checker.isCanceled()) {
-                return new SemanticTokens();
-            }
-
-            /* Analyze and get semantic tokens */
-            SemanticTokens semanticTokens = new SemanticTokens();
-
-            Path filePath = Paths.get(URI.create(params.getTextDocument().getUri()));
-            try (BufferedReader br = Files.newBufferedReader(filePath)) {
-                List<Token> tokens = SemanticAnalyzer.analyze(Tokenizer.tokenize(br));
-
-                List<Integer> tokenData = new ArrayList<>();
-                int prevLine = 0;
-                int columnOffset = 0;
-                for (Token token : tokens) {
-                    if (token instanceof SemanticToken semanticToken) {
-                        int tokenType = supportedTokenTypes.indexOf(semanticToken.getSemanticType());
-
-                        /* Skip the token if the token type is not supported by the client. */
-                        if (tokenType == -1) {
-                            continue;
-                        }
-
-                        /* Prepare data of the semantic token */
-                        int tokenLine = semanticToken.getStartPos().getLine() - 1;
-                        int tokenColumn = semanticToken.getStartPos().getColumn() - 1;
-                        if (tokenLine == prevLine) {
-                            tokenLine -= prevLine;
-                            tokenColumn -= columnOffset;
-                        } else {
-                            tokenLine -= prevLine;
-                            prevLine = semanticToken.getStartPos().getLine() - 1;
-                        }
-                        tokenData.add(tokenLine); // line
-                        tokenData.add(tokenColumn); // deltaStartChar
-                        tokenData.add(semanticToken.getText().length()); // length
-                        tokenData.add(tokenType); // tokenType
-                        tokenData.add(0); // tokenModifiers
-
-                        columnOffset = semanticToken.getStartPos().getColumn() - 1;
-                    }
-                }
-                semanticTokens.setData(tokenData);
-            } catch (IOException e) {
+        return CompletableFuture.supplyAsync(() -> {
+            try {
+                Path path = Path.of(new URI(params.getTextDocument().getUri()));
+                sourceText = Files.readString(path);
+                // Send results to the client
+                return updateSemanticTokens(sourceText, supportedTokenTypes);
+            } catch (IOException | URISyntaxException e) {
                 throw new RuntimeException(e);
             }
-            return semanticTokens; // Send result to the client
         });
     }
 
+    private SemanticTokens updateSemanticTokens(String sourceText, List<String> tokenTypes) throws IOException {
+        List<Integer> tokenData = new ArrayList<>();
+
+        List<Token> tokens = SemanticAnalyzer.analyze(Tokenizer.tokenize(sourceText));
+        int prevLine = 0;
+        int columnOffset = 0;
+        for (Token token : tokens) {
+            if (token instanceof SemanticToken semanticToken) {
+                int tokenType = tokenTypes.indexOf(semanticToken.getSemanticType());
+
+                /* Skip the token if the token type is not supported by the client. */
+                if (tokenType == -1) {
+                    continue;
+                }
+
+                /* Prepare data of the semantic token */
+                int tokenLine = semanticToken.getStartPos().getLine() - 1;
+                int tokenColumn = semanticToken.getStartPos().getColumn() - 1;
+                if (tokenLine == prevLine) {
+                    tokenLine -= prevLine;
+                    tokenColumn -= columnOffset;
+                } else {
+                    tokenLine -= prevLine;
+                    prevLine = semanticToken.getStartPos().getLine() - 1;
+                }
+                tokenData.add(tokenLine); // line
+                tokenData.add(tokenColumn); // deltaStartChar
+                tokenData.add(semanticToken.getText().length()); // length
+                tokenData.add(tokenType); // tokenType
+                tokenData.add(0); // tokenModifiers
+
+                columnOffset = semanticToken.getStartPos().getColumn() - 1;
+            }
+        }
+        return new SemanticTokens(tokenData);
+    }
+
     /**
      * Updates the supported token types by the client.
      *
diff --git a/LanguageServer/src/main/java/org/nittalab/dtram/languageserver/utils/SemanticAnalyzer.java b/LanguageServer/src/main/java/org/nittalab/dtram/languageserver/utils/SemanticAnalyzer.java
index 5f1e4ae..cd2e157 100644
--- a/LanguageServer/src/main/java/org/nittalab/dtram/languageserver/utils/SemanticAnalyzer.java
+++ b/LanguageServer/src/main/java/org/nittalab/dtram/languageserver/utils/SemanticAnalyzer.java
@@ -27,7 +27,10 @@
         tokens = analyzeStringTokens(tokens);
         tokens = analyzeTypeTokens(tokens);
         tokens = analyzeKeywordTokens(tokens);
-        // TODO: Analyze functions and constants at least.
+        tokens = analyzeSymbolTokens(tokens);
+        tokens = analyzeAssignedSymbolTokens(tokens);
+        tokens = analyzeChannelNameTokens(tokens);
+        tokens = analyzeArgumentTokens(tokens);
         return tokens;
     }
 
@@ -66,11 +69,10 @@
                 continue;
             }
             switch (token.getText()) {
-                case Operators.ADD, Operators.SUB, Operators.MUL, Operators.DIV, Operators.MOD,
-                     Operators.EQ, Operators.NEQ,
-                     Operators.GT, Operators.LT, Operators.GE, Operators.LE,
-                     Operators.AND, Operators.OR, Operators.NEG,
-                     Operators.ASSIGNMENT -> newTokens.add(new SemanticToken(SemanticTokenTypes.Operator, token));
+                case Operators.ADD, Operators.SUB, Operators.MUL, Operators.DIV, Operators.MOD, Operators.EQ,
+                     Operators.NEQ, Operators.GT, Operators.LT, Operators.GE, Operators.LE, Operators.AND, Operators.OR,
+                     Operators.NEG, Operators.ASSIGNMENT ->
+                        newTokens.add(new SemanticToken(SemanticTokenTypes.Operator, token));
                 case Tokens.DOUBLE_QUOT -> newTokens.add(new SemanticToken(SemanticTokenTypes.String, token));
                 default -> {
                     if (token.getText().startsWith(Comments.COMMENT)) { // Single-line comment
@@ -138,8 +140,8 @@
             }
             if (wasColon) {
                 switch (token.getText()) {
-                    case Types.INTEGER, Types.LONG, Types.FLOAT, Types.DOUBLE, Types.BOOLEAN, Types.STRING,
-                         Types.LIST, Types.PAIR, Types.TUPLE, Types.MAP, Types.JSON -> {
+                    case Types.INTEGER, Types.LONG, Types.FLOAT, Types.DOUBLE, Types.BOOLEAN, Types.STRING, Types.LIST,
+                         Types.PAIR, Types.TUPLE, Types.MAP, Types.JSON -> {
                         SemanticToken newToken = new SemanticToken(SemanticTokenTypes.Type, token);
                         newTokens.add(newToken);
                     }
@@ -169,8 +171,8 @@
                 continue;
             }
             switch (token.getText()) {
-                case Keywords.INIT, Keywords.CHANNEL, Keywords.NATIVE,
-                     Keywords.IN, Keywords.REF, Keywords.OUT, Keywords.SUB_CHANNEL -> {
+                case Keywords.INIT, Keywords.CHANNEL, Keywords.NATIVE, Keywords.IN, Keywords.REF, Keywords.OUT,
+                     Keywords.SUB_CHANNEL -> {
                     SemanticToken newToken = new SemanticToken(SemanticTokenTypes.Keyword, token);
                     newTokens.add(newToken);
                 }
@@ -179,4 +181,164 @@
         }
         return newTokens;
     }
+
+    /**
+     * Analyzes channel name tokens.
+     *
+     * @param tokens The tokens to be analyzed
+     * @return The tokens with analyzed channel name tokens.
+     * @author Shohei Yamagiwa
+     * @since 0.1
+     */
+    protected static List<Token> analyzeChannelNameTokens(List<Token> tokens) {
+        List<Token> newTokens = new ArrayList<>();
+
+        boolean channelNameExpected = false;
+        for (Token token : tokens) {
+            if (token instanceof SemanticToken semanticToken) {
+                if (semanticToken.getText().equals(Keywords.CHANNEL) || semanticToken.getText().equals(Keywords.SUB_CHANNEL)) {
+                    channelNameExpected = true;
+                }
+                newTokens.add(semanticToken);
+                continue;
+            }
+            if (token.getText().equals(Tokens.LEFT_BRACKET) || token.getText().equals(Tokens.LEFT_CURLY_BRACKET)) {
+                channelNameExpected = false;
+            }
+            if (channelNameExpected) {
+                SemanticToken newToken = new SemanticToken(SemanticTokenTypes.Class, token);
+                newTokens.add(newToken);
+            } else {
+                newTokens.add(token);
+            }
+        }
+        return newTokens;
+    }
+
+    /**
+     * Analyzes symbol tokens.
+     *
+     * @param tokens The tokens to be analyzed
+     * @return The tokens with analyzed symbol tokens.
+     * @author Shohei Yamagiwa
+     * @since 0.1
+     */
+    protected static List<Token> analyzeSymbolTokens(List<Token> tokens) {
+        List<Token> newTokens = new ArrayList<>();
+
+        boolean symbolExpected = false;
+        for (Token token : tokens) {
+            if (token instanceof SemanticToken semanticToken) {
+                if (semanticToken.getText().equals(Keywords.IN) || semanticToken.getText().equals(Keywords.REF) || semanticToken.getText().equals(Keywords.OUT)) {
+                    symbolExpected = true;
+                }
+                newTokens.add(semanticToken);
+                continue;
+            }
+            if (token.getText().equals(Tokens.LEFT_BRACKET)) {
+                symbolExpected = false;
+            }
+            if (symbolExpected) {
+                SemanticToken newToken = new SemanticToken(SemanticTokenTypes.Function, token);
+                newTokens.add(newToken);
+            } else {
+                newTokens.add(token);
+            }
+        }
+        return newTokens;
+    }
+
+    /**
+     * Analyzes assigned symbol tokens.
+     *
+     * @param tokens The tokens to be analyzed
+     * @return The tokens with analyzed assigned symbol tokens.
+     * @author Shohei Yamagiwa
+     * @since 0.1
+     */
+    protected static List<Token> analyzeAssignedSymbolTokens(List<Token> tokens) {
+        List<Token> newTokens = new ArrayList<>();
+
+        boolean symbolExpected = false;
+        for (Token token : tokens) {
+            if (token instanceof SemanticToken semanticToken) {
+                if (semanticToken.getText().equals(Operators.ASSIGNMENT)) {
+                    symbolExpected = true;
+                }
+                newTokens.add(semanticToken);
+                continue;
+            }
+            if (symbolExpected) {
+                Token nextToken = tokens.indexOf(token) + 1 <= tokens.size() - 1 ? tokens.get(tokens.indexOf(token) + 1) : null;
+                if (nextToken == null) {
+                    SemanticToken newToken = new SemanticToken(SemanticTokenTypes.Parameter, token);
+                    newTokens.add(newToken);
+                    break;
+                }
+
+                SemanticToken newToken;
+                if (nextToken.getText().equals(Tokens.LEFT_BRACKET)) {
+                    newToken = new SemanticToken(SemanticTokenTypes.Function, token); // Token is a function symbol
+                } else {
+                    newToken = new SemanticToken(SemanticTokenTypes.Parameter, token); // Token is a parameter of the function
+                }
+                newTokens.add(newToken);
+                symbolExpected = false;
+            } else {
+                newTokens.add(token);
+            }
+        }
+        return newTokens;
+    }
+
+    /**
+     * Analyzes argument tokens.
+     *
+     * @param tokens The tokens to be analyzed
+     * @return The tokens with analyzed argument tokens.
+     * @author Shohei Yamagiwa
+     * @since 0.1
+     */
+    protected static List<Token> analyzeArgumentTokens(List<Token> tokens) {
+        List<Token> newTokens = new ArrayList<>();
+
+        boolean insideBracket = false;
+        for (Token token : tokens) {
+            if (token instanceof SemanticToken) {
+                newTokens.add(token);
+                continue;
+            }
+            switch (token.getText()) {
+                case Tokens.LEFT_BRACKET -> {
+                    newTokens.add(token);
+                    insideBracket = true;
+                }
+                case Tokens.RIGHT_BRACKET -> {
+                    newTokens.add(token);
+                    insideBracket = false;
+                }
+                default -> {
+                    if (insideBracket && !token.getText().equals(Tokens.COLON)) {
+                        Token nextToken = tokens.indexOf(token) + 1 <= tokens.size() - 1 ? tokens.get(tokens.indexOf(token) + 1) : null;
+                        if (nextToken == null) {
+                            SemanticToken newToken = new SemanticToken(SemanticTokenTypes.Parameter, token);
+                            newTokens.add(newToken);
+                            break;
+                        }
+
+                        SemanticToken newToken;
+                        if (nextToken.getText().equals(Tokens.LEFT_BRACKET)) {
+                            newToken = new SemanticToken(SemanticTokenTypes.Function, token); // Token is a function symbol
+                        } else {
+                            newToken = new SemanticToken(SemanticTokenTypes.Parameter, token); // Token is a parameter of the function
+                        }
+                        newTokens.add(newToken);
+                    } else {
+                        newTokens.add(token);
+                    }
+                }
+            }
+        }
+        return newTokens;
+    }
 }
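
Note on the token encoding: updateSemanticTokens emits the relative encoding that the LSP semanticTokens/full request expects, five integers per token (deltaLine, deltaStartChar, length, tokenType, tokenModifiers). deltaLine is relative to the previous token, and deltaStartChar is relative to the previous token only when both tokens sit on the same line, absolute otherwise. A minimal worked example with made-up positions (K and C stand for whatever type indices the client negotiated, not real constants):

    // Token A: line 3, column 0, text "channel" (7 chars, type K)
    // Token B: line 3, column 8, text "Foo" (3 chars, type C)
    //
    // A is the first token: deltaLine = 3 - 0, and its start stays absolute
    // because the line changed relative to the previous token:
    //     [3, 0, 7, K, 0]
    // B shares A's line, so both deltas are taken against A:
    //     [0, 8 - 0, 3, C, 0]
    //
    // Data array sent to the client:
    //     [3, 0, 7, K, 0, 0, 8, 3, C, 0]

Two smaller observations, offered as suggestions rather than as part of the patch. First, semanticTokensFull re-reads the file from disk and overwrites the sourceText that didOpen/didChange already track, so unsaved edits are highlighted against stale text; preferring the tracked sourceText when it is non-null would avoid that. Second, analyzeAssignedSymbolTokens and analyzeArgumentTokens locate the successor with tokens.indexOf(token), which rescans the list on every iteration (quadratic per pass) and would return the first duplicate if Token ever gains a value-based equals. An index-based loop sidesteps both; a sketch assuming nothing beyond List<Token>:

    for (int i = 0; i < tokens.size(); i++) {
        Token token = tokens.get(i);
        // Successor of the element actually being visited, or null at the end
        Token nextToken = (i + 1 < tokens.size()) ? tokens.get(i + 1) : null;
        // ... same classification logic as in the patch ...
    }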