rewrites = programs.get(programName);
- int start = interval.a;
- int stop = interval.b;
+ int start = interval.a();
+ int stop = interval.b();
// ensure start/end are in range
if (stop > tokens.size() - 1) stop = tokens.size() - 1;
diff --git a/src/main/java/org/antlr/v4/runtime/UnbufferedCharStream.java b/src/main/java/org/antlr/v4/runtime/UnbufferedCharStream.java
index 85e2b7b..81de47f 100644
--- a/src/main/java/org/antlr/v4/runtime/UnbufferedCharStream.java
+++ b/src/main/java/org/antlr/v4/runtime/UnbufferedCharStream.java
@@ -10,6 +10,7 @@
package org.antlr.v4.runtime;
import org.antlr.v4.runtime.misc.Interval;
+import org.antlr.v4.runtime.misc.NotNull;
import java.io.IOException;
import java.io.InputStream;
@@ -281,6 +282,7 @@ public int size() {
throw new UnsupportedOperationException("Unbuffered stream cannot know its size");
}
+ @NotNull
@Override
public String getSourceName() {
if (name == null || name.isEmpty()) {
@@ -290,25 +292,26 @@ public String getSourceName() {
return name;
}
+ @NotNull
@Override
- public String getText(Interval interval) {
- if (interval.a < 0 || interval.b < interval.a - 1) {
+ public String getText(@NotNull Interval interval) {
+ if (interval.a() < 0 || interval.b() < interval.a() - 1) {
throw new IllegalArgumentException("invalid interval");
}
int bufferStartIndex = getBufferStartIndex();
if (n > 0 && data[n - 1] == Character.MAX_VALUE) {
- if (interval.a + interval.length() > bufferStartIndex + n) {
+ if (interval.a() + interval.length() > bufferStartIndex + n) {
throw new IllegalArgumentException("the interval extends past the end of the stream");
}
}
- if (interval.a < bufferStartIndex || interval.b >= bufferStartIndex + n) {
+ if (interval.a() < bufferStartIndex || interval.b() >= bufferStartIndex + n) {
throw new UnsupportedOperationException("interval " + interval + " outside buffer: " +
bufferStartIndex + ".." + (bufferStartIndex + n - 1));
}
// convert from absolute to local index
- int i = interval.a - bufferStartIndex;
+ int i = interval.a() - bufferStartIndex;
return new String(data, i, interval.length());
}
diff --git a/src/main/java/org/antlr/v4/runtime/UnbufferedTokenStream.java b/src/main/java/org/antlr/v4/runtime/UnbufferedTokenStream.java
index 97d7e8e..8f9f0ef 100644
--- a/src/main/java/org/antlr/v4/runtime/UnbufferedTokenStream.java
+++ b/src/main/java/org/antlr/v4/runtime/UnbufferedTokenStream.java
@@ -79,6 +79,7 @@ public UnbufferedTokenStream(TokenSource tokenSource, int bufferSize) {
fill(1); // prime the pump
}
+ @NotNull
@Override
public Token get(int i) {
int bufferStartIndex = getBufferStartIndex();
@@ -89,6 +90,7 @@ public Token get(int i) {
return tokens[i - bufferStartIndex];
}
+ @NotNull
@Override
public Token LT(int i) {
if (i == -1) {
@@ -114,6 +116,7 @@ public int LA(int i) {
return LT(i).getType();
}
+ @NotNull
@Override
public TokenSource getTokenSource() {
return tokenSource;
@@ -127,7 +130,7 @@ public String getText() {
@NotNull
@Override
- public String getText(RuleContext ctx) {
+ public String getText(@NotNull RuleContext ctx) {
return getText(ctx.getSourceInterval());
}
@@ -282,6 +285,7 @@ public int size() {
throw new UnsupportedOperationException("Unbuffered stream cannot know its size");
}
+ @NotNull
@Override
public String getSourceName() {
return tokenSource.getSourceName();
@@ -289,12 +293,12 @@ public String getSourceName() {
@NotNull
@Override
- public String getText(Interval interval) {
+ public String getText(@NotNull Interval interval) {
int bufferStartIndex = getBufferStartIndex();
int bufferStopIndex = bufferStartIndex + tokens.length - 1;
- int start = interval.a;
- int stop = interval.b;
+ int start = interval.a();
+ int stop = interval.b();
if (start < bufferStartIndex || stop > bufferStopIndex) {
throw new UnsupportedOperationException("interval " + interval + " not in token buffer window: " +
bufferStartIndex + ".." + bufferStopIndex);
diff --git a/src/main/java/org/antlr/v4/runtime/atn/ATN.java b/src/main/java/org/antlr/v4/runtime/atn/ATN.java
index dd1973e..696fceb 100644
--- a/src/main/java/org/antlr/v4/runtime/atn/ATN.java
+++ b/src/main/java/org/antlr/v4/runtime/atn/ATN.java
@@ -128,7 +128,7 @@ public PredictionContext getCachedContext(PredictionContext context) {
}
public final DFA[] getDecisionToDFA() {
- assert decisionToDFA != null && decisionToDFA.length == decisionToState.size();
+ assert decisionToDFA.length == decisionToState.size();
return decisionToDFA;
}
@@ -220,7 +220,7 @@ public int getNumberOfDecisions() {
* The big difference is that with just the input, the parser could land
* right in the middle of a lookahead decision. Getting all
* possible tokens given a partial input stream is a separate
- * computation. See https://github.com/antlr/antlr4/issues/1428
+ * computation. See ...
*
* For this function, we are specifying an ATN state and call stack to
* compute what token(s) can come next and specifically: outside of a
diff --git a/src/main/java/org/antlr/v4/runtime/atn/ATNDeserializer.java b/src/main/java/org/antlr/v4/runtime/atn/ATNDeserializer.java
index e3a6b07..6eed1ec 100644
--- a/src/main/java/org/antlr/v4/runtime/atn/ATNDeserializer.java
+++ b/src/main/java/org/antlr/v4/runtime/atn/ATNDeserializer.java
@@ -984,7 +984,7 @@ private static int optimizeSets(ATN atn, boolean preserveOrder) {
IntervalSet matchSet = new IntervalSet();
for (int i = 0; i < setTransitions.getIntervals().size(); i++) {
Interval interval = setTransitions.getIntervals().get(i);
- for (int j = interval.a; j <= interval.b; j++) {
+ for (int j = interval.a(); j <= interval.b(); j++) {
Transition matchTransition = decision.getOptimizedTransition(j).target.getOptimizedTransition(0);
if (matchTransition instanceof NotSetTransition) {
throw new UnsupportedOperationException("Not yet implemented.");
@@ -1000,7 +1000,7 @@ private static int optimizeSets(ATN atn, boolean preserveOrder) {
newTransition = new AtomTransition(blockEndState, matchSet.getMinElement());
} else {
Interval matchInterval = matchSet.getIntervals().get(0);
- newTransition = new RangeTransition(blockEndState, matchInterval.a, matchInterval.b);
+ newTransition = new RangeTransition(blockEndState, matchInterval.a(), matchInterval.b());
}
} else {
newTransition = new SetTransition(blockEndState, matchSet);
diff --git a/src/main/java/org/antlr/v4/runtime/atn/ATNSerializer.java b/src/main/java/org/antlr/v4/runtime/atn/ATNSerializer.java
index 20b8001..d7902b5 100644
--- a/src/main/java/org/antlr/v4/runtime/atn/ATNSerializer.java
+++ b/src/main/java/org/antlr/v4/runtime/atn/ATNSerializer.java
@@ -381,7 +381,7 @@ private static void serializeSets(
for (IntervalSet set : sets) {
boolean containsEof = set.contains(Token.EOF);
- if (containsEof && set.getIntervals().get(0).b == Token.EOF) {
+ if (containsEof && set.getIntervals().get(0).b() == Token.EOF) {
data.add(set.getIntervals().size() - 1);
} else {
data.add(set.getIntervals().size());
@@ -389,17 +389,17 @@ private static void serializeSets(
data.add(containsEof ? 1 : 0);
for (Interval I : set.getIntervals()) {
- if (I.a == Token.EOF) {
- if (I.b == Token.EOF) {
+ if (I.a() == Token.EOF) {
+ if (I.b() == Token.EOF) {
continue;
} else {
codePointSerializer.serializeCodePoint(data, 0);
}
} else {
- codePointSerializer.serializeCodePoint(data, I.a);
+ codePointSerializer.serializeCodePoint(data, I.a());
}
- codePointSerializer.serializeCodePoint(data, I.b);
+ codePointSerializer.serializeCodePoint(data, I.b());
}
}
}
diff --git a/src/main/java/org/antlr/v4/runtime/atn/ParserATNSimulator.java b/src/main/java/org/antlr/v4/runtime/atn/ParserATNSimulator.java
index 1912fcf..c2366bf 100644
--- a/src/main/java/org/antlr/v4/runtime/atn/ParserATNSimulator.java
+++ b/src/main/java/org/antlr/v4/runtime/atn/ParserATNSimulator.java
@@ -2089,7 +2089,7 @@ public void dumpDeadEndConfigs(@NotNull NoViableAltException nvae) {
trans = "Atom " + getTokenName(at.label);
} else if (t instanceof SetTransition st) {
boolean not = st instanceof NotSetTransition;
- trans = (not ? "~" : "") + "Set " + st.set.toString();
+ trans = (not ? "~" : "") + "Set " + st.set;
}
}
System.err.println(c.toString(parser, true) + ":" + trans);
diff --git a/src/main/java/org/antlr/v4/runtime/atn/ProfilingATNSimulator.java b/src/main/java/org/antlr/v4/runtime/atn/ProfilingATNSimulator.java
index c8124c5..8eb06e1 100644
--- a/src/main/java/org/antlr/v4/runtime/atn/ProfilingATNSimulator.java
+++ b/src/main/java/org/antlr/v4/runtime/atn/ProfilingATNSimulator.java
@@ -18,6 +18,7 @@
import org.antlr.v4.runtime.misc.Pair;
import java.util.BitSet;
+import java.util.Objects;
/**
* @since 4.3
@@ -104,12 +105,13 @@ public int adaptivePredict(TokenStream input, int decision, ParserRuleContext ou
}
@Override
- protected SimulatorState getStartState(DFA dfa, TokenStream input, ParserRuleContext outerContext, boolean useContext) {
+ protected SimulatorState getStartState(@NotNull DFA dfa, @NotNull TokenStream input, @NotNull ParserRuleContext outerContext, boolean useContext) {
SimulatorState state = super.getStartState(dfa, input, outerContext, useContext);
currentState = state;
return state;
}
+ @NotNull
@Override
protected SimulatorState computeStartState(DFA dfa, ParserRuleContext globalContext, boolean useContext) {
SimulatorState state = super.computeStartState(dfa, globalContext, useContext);
@@ -132,7 +134,7 @@ protected SimulatorState computeReachSet(DFA dfa, SimulatorState previous, int t
}
@Override
- protected DFAState getExistingTargetState(DFAState previousD, int t) {
+ protected DFAState getExistingTargetState(@NotNull DFAState previousD, int t) {
// this method is called after each time the input position advances
if (currentState.useContext) {
_llStopIndex = _input.index();
@@ -163,9 +165,10 @@ protected DFAState getExistingTargetState(DFAState previousD, int t) {
return existingTargetState;
}
+ @NotNull
@Override
- protected Pair computeTargetState(DFA dfa,
- DFAState s,
+ protected Pair computeTargetState(@NotNull DFA dfa,
+ @NotNull DFAState s,
ParserRuleContext remainingGlobalContext,
int t,
boolean useContext,
@@ -182,7 +185,7 @@ protected Pair computeTargetState(DFA dfa,
}
@Override
- protected boolean evalSemanticContext(SemanticContext pred, ParserRuleContext parserCallStack, int alt) {
+ protected boolean evalSemanticContext(@NotNull SemanticContext pred, ParserRuleContext parserCallStack, int alt) {
boolean result = super.evalSemanticContext(pred, parserCallStack, alt);
if (!(pred instanceof SemanticContext.PrecedencePredicate)) {
boolean fullContext = _llStopIndex >= 0;
@@ -196,7 +199,7 @@ protected boolean evalSemanticContext(SemanticContext pred, ParserRuleContext pa
}
@Override
- protected void reportContextSensitivity(DFA dfa, int prediction, SimulatorState acceptState, int startIndex, int stopIndex) {
+ protected void reportContextSensitivity(@NotNull DFA dfa, int prediction, @NotNull SimulatorState acceptState, int startIndex, int stopIndex) {
if (prediction != conflictingAltResolvedBySLL) {
decisions[currentDecision].contextSensitivities.add(
new ContextSensitivityInfo(currentDecision, acceptState, _input, startIndex, stopIndex)
@@ -206,24 +209,18 @@ protected void reportContextSensitivity(DFA dfa, int prediction, SimulatorState
}
@Override
- protected void reportAttemptingFullContext(DFA dfa, BitSet conflictingAlts, SimulatorState conflictState, int startIndex, int stopIndex) {
- if (conflictingAlts != null) {
- conflictingAltResolvedBySLL = conflictingAlts.nextSetBit(0);
- } else {
- conflictingAltResolvedBySLL = conflictState.s0.configs.getRepresentedAlternatives().nextSetBit(0);
- }
+ protected void reportAttemptingFullContext(@NotNull DFA dfa, BitSet conflictingAlts, @NotNull SimulatorState conflictState, int startIndex, int stopIndex) {
+ conflictingAltResolvedBySLL = Objects.requireNonNullElseGet(
+ conflictingAlts,
+ conflictState.s0.configs::getRepresentedAlternatives)
+ .nextSetBit(0);
decisions[currentDecision].LL_Fallback++;
super.reportAttemptingFullContext(dfa, conflictingAlts, conflictState, startIndex, stopIndex);
}
@Override
protected void reportAmbiguity(@NotNull DFA dfa, DFAState D, int startIndex, int stopIndex, boolean exact, @NotNull BitSet ambigAlts, @NotNull ATNConfigSet configs) {
- int prediction;
- if (ambigAlts != null) {
- prediction = ambigAlts.nextSetBit(0);
- } else {
- prediction = configs.getRepresentedAlternatives().nextSetBit(0);
- }
+ int prediction = ambigAlts.nextSetBit(0);
if (conflictingAltResolvedBySLL != ATN.INVALID_ALT_NUMBER && prediction != conflictingAltResolvedBySLL) {
// Even though this is an ambiguity we are reporting, we can
// still detect some context sensitivities. Both SLL and LL
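Aside (illustration, not part of the patch): the rewritten reportAttemptingFullContext above relies on Objects.requireNonNullElseGet (Java 9+), which returns its first argument when non-null and otherwise obtains the fallback from the supplier, evaluating it lazily. A minimal sketch of the idiom with placeholder BitSets:

    import java.util.BitSet;
    import java.util.Objects;

    BitSet conflictingAlts = null;          // e.g. no conflicting alternatives were reported
    BitSet represented = new BitSet();      // stand-in for s0.configs.getRepresentedAlternatives()
    represented.set(2);
    // the supplier runs only because conflictingAlts is null; resolved == 2 here
    int resolved = Objects.requireNonNullElseGet(conflictingAlts, () -> represented).nextSetBit(0);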
diff --git a/src/main/java/org/antlr/v4/runtime/dfa/AbstractEdgeMap.java b/src/main/java/org/antlr/v4/runtime/dfa/AbstractEdgeMap.java
index dc48b68..f9213f3 100644
--- a/src/main/java/org/antlr/v4/runtime/dfa/AbstractEdgeMap.java
+++ b/src/main/java/org/antlr/v4/runtime/dfa/AbstractEdgeMap.java
@@ -9,6 +9,8 @@
*/
package org.antlr.v4.runtime.dfa;
+import org.antlr.v4.runtime.misc.NotNull;
+
import java.util.AbstractSet;
import java.util.Map;
import java.util.Objects;
@@ -28,11 +30,13 @@ public AbstractEdgeMap(int minIndex, int maxIndex) {
this.maxIndex = maxIndex;
}
+ @NotNull
@Override
public abstract AbstractEdgeMap<T> put(int key, T value);
+ @NotNull
@Override
- public AbstractEdgeMap<T> putAll(EdgeMap<? extends T> m) {
+ public AbstractEdgeMap<T> putAll(@NotNull EdgeMap<? extends T> m) {
AbstractEdgeMap<T> result = this;
for (Map.Entry<Integer, ? extends T> entry : m.entrySet()) {
result = result.put(entry.getKey(), entry.getValue());
@@ -41,9 +45,11 @@ public AbstractEdgeMap putAll(EdgeMap extends T> m) {
return result;
}
+ @NotNull
@Override
public abstract AbstractEdgeMap<T> clear();
+ @NotNull
@Override
public abstract AbstractEdgeMap<T> remove(int key);
diff --git a/src/main/java/org/antlr/v4/runtime/dfa/ArrayEdgeMap.java b/src/main/java/org/antlr/v4/runtime/dfa/ArrayEdgeMap.java
index 4f81234..78c9d62 100644
--- a/src/main/java/org/antlr/v4/runtime/dfa/ArrayEdgeMap.java
+++ b/src/main/java/org/antlr/v4/runtime/dfa/ArrayEdgeMap.java
@@ -9,10 +9,11 @@
*/
package org.antlr.v4.runtime.dfa;
+import org.antlr.v4.runtime.misc.NotNull;
+
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
-import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
@@ -59,6 +60,7 @@ public T get(int key) {
return arrayData.get(key - minIndex);
}
+ @NotNull
@Override
public ArrayEdgeMap<T> put(int key, T value) {
if (key >= minIndex && key <= maxIndex) {
@@ -73,14 +75,16 @@ public ArrayEdgeMap put(int key, T value) {
return this;
}
+ @NotNull
@Override
public ArrayEdgeMap<T> remove(int key) {
return put(key, null);
}
+ @NotNull
@Override
@SuppressWarnings("deprecation")
- public ArrayEdgeMap<T> putAll(EdgeMap<? extends T> m) {
+ public ArrayEdgeMap<T> putAll(@NotNull EdgeMap<? extends T> m) {
if (m.isEmpty()) {
return this;
}
@@ -112,27 +116,18 @@ public ArrayEdgeMap<T> putAll(EdgeMap<? extends T> m) {
SingletonEdgeMap<? extends T> other = (SingletonEdgeMap<? extends T>) m;
assert !other.isEmpty();
return put(other.getKey(), other.getValue());
- } else if (m instanceof SparseEdgeMap<?>) {
- SparseEdgeMap<? extends T> other = (SparseEdgeMap<? extends T>) m;
- synchronized (other) {
- int[] keys = other.getKeys();
- List<? extends T> values = other.getValues();
- ArrayEdgeMap<T> result = this;
- for (int i = 0; i < values.size(); i++) {
- result = result.put(keys[i], values.get(i));
- }
- return result;
- }
} else {
throw new UnsupportedOperationException(String.format("EdgeMap of type %s is not supported yet.", m.getClass().getName()));
}
}
+ @NotNull
@Override
public EmptyEdgeMap<T> clear() {
return new EmptyEdgeMap<>(minIndex, maxIndex);
}
+ @NotNull
@Override
public Map<Integer, T> toMap() {
if (isEmpty()) {
@@ -152,6 +147,7 @@ public Map toMap() {
return result;
}
+ @NotNull
@Override
public Set<Map.Entry<Integer, T>> entrySet() {
return new EntrySet();
diff --git a/src/main/java/org/antlr/v4/runtime/dfa/DFA.java b/src/main/java/org/antlr/v4/runtime/dfa/DFA.java
index b14827a..9f30fe4 100644
--- a/src/main/java/org/antlr/v4/runtime/dfa/DFA.java
+++ b/src/main/java/org/antlr/v4/runtime/dfa/DFA.java
@@ -198,7 +198,7 @@ public final int getMaxDfaEdge() {
* overhead for storing the map of outgoing edges. The various
* implementations of {@link EdgeMap} use this range to determine whether the best
* memory savings will be obtained from sparse storage (e.g.
- * {@link SingletonEdgeMap} or {@link SparseEdgeMap}) or dense storage
+ * {@link SingletonEdgeMap}) or dense storage
* ({@link ArrayEdgeMap}). Symbols values outside the range are supported
* during prediction, but since DFA edges are never created for these
* symbols they will always recompute the target state through a match and
diff --git a/src/main/java/org/antlr/v4/runtime/dfa/EmptyEdgeMap.java b/src/main/java/org/antlr/v4/runtime/dfa/EmptyEdgeMap.java
index 26b3fdc..b820031 100644
--- a/src/main/java/org/antlr/v4/runtime/dfa/EmptyEdgeMap.java
+++ b/src/main/java/org/antlr/v4/runtime/dfa/EmptyEdgeMap.java
@@ -9,6 +9,8 @@
*/
package org.antlr.v4.runtime.dfa;
+import org.antlr.v4.runtime.misc.NotNull;
+
import java.util.Collections;
import java.util.Map;
import java.util.Set;
@@ -24,6 +26,7 @@ public EmptyEdgeMap(int minIndex, int maxIndex) {
super(minIndex, maxIndex);
}
+ @NotNull
@Override
public AbstractEdgeMap<T> put(int key, T value) {
if (value == null || key < minIndex || key > maxIndex) {
@@ -34,11 +37,13 @@ public AbstractEdgeMap put(int key, T value) {
return new SingletonEdgeMap<>(minIndex, maxIndex, key, value);
}
+ @NotNull
@Override
public AbstractEdgeMap<T> clear() {
return this;
}
+ @NotNull
@Override
public AbstractEdgeMap<T> remove(int key) {
return this;
@@ -64,11 +69,13 @@ public T get(int key) {
return null;
}
+ @NotNull
@Override
public Map<Integer, T> toMap() {
return Collections.emptyMap();
}
+ @NotNull
@Override
public Set<Map.Entry<Integer, T>> entrySet() {
return Collections.<Integer, T>emptyMap().entrySet();
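Aside (illustration, not part of the patch): the DFA javadoc earlier in this diff explains how the runtime picks an EdgeMap representation by density, and the EmptyEdgeMap.put change just shown is the first step of that promotion. A hedged sketch of the progression, using a made-up String value type and index bounds purely for illustration (the real maps store DFA states):

    // Editorial sketch based on the code visible in this diff.
    AbstractEdgeMap<String> edges = new EmptyEdgeMap<>(0, 255); // no outgoing edges yet
    edges = edges.put(65, "toStateA"); // EmptyEdgeMap.put hands back a SingletonEdgeMap
    edges = edges.put(66, "toStateB"); // further entries promote to a denser map (HashEdgeMap / ArrayEdgeMap)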
diff --git a/src/main/java/org/antlr/v4/runtime/dfa/HashEdgeMap.java b/src/main/java/org/antlr/v4/runtime/dfa/HashEdgeMap.java
index 72720d8..3b69a15 100644
--- a/src/main/java/org/antlr/v4/runtime/dfa/HashEdgeMap.java
+++ b/src/main/java/org/antlr/v4/runtime/dfa/HashEdgeMap.java
@@ -116,6 +116,7 @@ public T get(int key) {
return value;
}
+ @NotNull
@Override
public AbstractEdgeMap<T> put(int key, T value) {
if (key < minIndex || key > maxIndex) {
@@ -165,6 +166,7 @@ public AbstractEdgeMap put(int key, T value) {
}
}
+ @NotNull
@Override
public HashEdgeMap<T> remove(int key) {
if (get(key) == null) {
@@ -178,6 +180,7 @@ public HashEdgeMap remove(int key) {
return result;
}
+ @NotNull
@Override
public AbstractEdgeMap<T> clear() {
if (isEmpty()) {
@@ -187,6 +190,7 @@ public AbstractEdgeMap clear() {
return new EmptyEdgeMap<>(minIndex, maxIndex);
}
+ @NotNull
@Override
public Map<Integer, T> toMap() {
if (isEmpty()) {
@@ -207,6 +211,7 @@ public Map toMap() {
}
}
+ @NotNull
@Override
public Set<Map.Entry<Integer, T>> entrySet() {
return toMap().entrySet();
diff --git a/src/main/java/org/antlr/v4/runtime/dfa/SingletonEdgeMap.java b/src/main/java/org/antlr/v4/runtime/dfa/SingletonEdgeMap.java
index e2bd6e5..6ceca4d 100644
--- a/src/main/java/org/antlr/v4/runtime/dfa/SingletonEdgeMap.java
+++ b/src/main/java/org/antlr/v4/runtime/dfa/SingletonEdgeMap.java
@@ -9,6 +9,8 @@
*/
package org.antlr.v4.runtime.dfa;
+import org.antlr.v4.runtime.misc.NotNull;
+
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
@@ -66,6 +68,7 @@ public T get(int key) {
return null;
}
+ @NotNull
@Override
public AbstractEdgeMap<T> put(int key, T value) {
if (key < minIndex || key > maxIndex) {
@@ -84,6 +87,7 @@ public AbstractEdgeMap put(int key, T value) {
}
}
+ @NotNull
@Override
public AbstractEdgeMap<T> remove(int key) {
if (key == this.key && this.value != null) {
@@ -93,6 +97,7 @@ public AbstractEdgeMap remove(int key) {
return this;
}
+ @NotNull
@Override
public AbstractEdgeMap<T> clear() {
if (this.value != null) {
@@ -102,6 +107,7 @@ public AbstractEdgeMap clear() {
return this;
}
+ @NotNull
@Override
public Map<Integer, T> toMap() {
if (isEmpty()) {
@@ -111,6 +117,7 @@ public Map toMap() {
return Collections.singletonMap(key, value);
}
+ @NotNull
@Override
public Set<Map.Entry<Integer, T>> entrySet() {
return new EntrySet();
diff --git a/src/main/java/org/antlr/v4/runtime/dfa/SparseEdgeMap.java b/src/main/java/org/antlr/v4/runtime/dfa/SparseEdgeMap.java
deleted file mode 100644
index 43c6040..0000000
--- a/src/main/java/org/antlr/v4/runtime/dfa/SparseEdgeMap.java
+++ /dev/null
@@ -1,185 +0,0 @@
-/**
- * This file is a part of ANTLR.
- *
- * Copyright (c) 2012-2025 The ANTLR Project. All rights reserved.
- * Copyright (c) 2025 Valery Maximov and contributors
- *
- * Use of this file is governed by the BSD-3-Clause license that
- * can be found in the LICENSE.txt file in the project root.
- */
-package org.antlr.v4.runtime.dfa;
-
-import org.antlr.v4.runtime.misc.NotNull;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * @author Sam Harwell
- * @deprecated Use {@link HashEdgeMap} instead.
- */
-@Deprecated
-public final class SparseEdgeMap<T> extends AbstractEdgeMap<T> {
- private static final int DEFAULT_MAX_SIZE = 5;
-
- private final int[] keys;
- private final List<T> values;
-
- public SparseEdgeMap(int minIndex, int maxIndex) {
- this(minIndex, maxIndex, DEFAULT_MAX_SIZE);
- }
-
- public SparseEdgeMap(int minIndex, int maxIndex, int maxSparseSize) {
- super(minIndex, maxIndex);
- this.keys = new int[maxSparseSize];
- this.values = new ArrayList<>(maxSparseSize);
- }
-
- private SparseEdgeMap(@NotNull SparseEdgeMap<T> map, int maxSparseSize) {
- super(map.minIndex, map.maxIndex);
- synchronized (map) {
- if (maxSparseSize < map.values.size()) {
- throw new IllegalArgumentException();
- }
-
- keys = Arrays.copyOf(map.keys, maxSparseSize);
- values = new ArrayList<>(maxSparseSize);
- values.addAll(map.values);
- }
- }
-
- public int[] getKeys() {
- return keys;
- }
-
- public List<T> getValues() {
- return values;
- }
-
- public int getMaxSparseSize() {
- return keys.length;
- }
-
- @Override
- public int size() {
- return values.size();
- }
-
- @Override
- public boolean isEmpty() {
- return values.isEmpty();
- }
-
- @Override
- public boolean containsKey(int key) {
- return get(key) != null;
- }
-
- @Override
- public T get(int key) {
- // Special property of this collection: values are only ever added to
- // the end, else a new object is returned from put(). Therefore no lock
- // is required in this method.
- int index = Arrays.binarySearch(keys, 0, size(), key);
- if (index < 0) {
- return null;
- }
-
- return values.get(index);
- }
-
- @Override
- public AbstractEdgeMap<T> put(int key, T value) {
- if (key < minIndex || key > maxIndex) {
- return this;
- }
-
- if (value == null) {
- return remove(key);
- }
-
- synchronized (this) {
- int index = Arrays.binarySearch(keys, 0, size(), key);
- if (index >= 0) {
- // replace existing entry
- values.set(index, value);
- return this;
- }
-
- assert index < 0 && value != null;
- int insertIndex = -index - 1;
- if (size() < getMaxSparseSize() && insertIndex == size()) {
- // stay sparse and add new entry
- keys[insertIndex] = key;
- values.add(value);
- return this;
- }
-
- int desiredSize = size() >= getMaxSparseSize() ? getMaxSparseSize() * 2 : getMaxSparseSize();
- int space = maxIndex - minIndex + 1;
- // SparseEdgeMap only uses less memory than ArrayEdgeMap up to half the size of the symbol space
- if (desiredSize >= space / 2) {
- ArrayEdgeMap<T> arrayMap = new ArrayEdgeMap<>(minIndex, maxIndex);
- arrayMap = arrayMap.putAll(this);
- arrayMap.put(key, value);
- return arrayMap;
- } else {
- SparseEdgeMap<T> resized = new SparseEdgeMap<>(this, desiredSize);
- System.arraycopy(resized.keys, insertIndex, resized.keys, insertIndex + 1, size() - insertIndex);
- resized.keys[insertIndex] = key;
- resized.values.add(insertIndex, value);
- return resized;
- }
- }
- }
-
- @Override
- public SparseEdgeMap<T> remove(int key) {
- synchronized (this) {
- int index = Arrays.binarySearch(keys, 0, size(), key);
- if (index < 0) {
- return this;
- }
-
- SparseEdgeMap<T> result = new SparseEdgeMap<>(this, getMaxSparseSize());
- System.arraycopy(result.keys, index + 1, result.keys, index, size() - index - 1);
- result.values.remove(index);
- return result;
- }
- }
-
- @Override
- public AbstractEdgeMap<T> clear() {
- if (isEmpty()) {
- return this;
- }
-
- return new EmptyEdgeMap<>(minIndex, maxIndex);
- }
-
- @Override
- public Map<Integer, T> toMap() {
- if (isEmpty()) {
- return Collections.emptyMap();
- }
-
- synchronized (this) {
- Map<Integer, T> result = new LinkedHashMap<>();
- for (int i = 0; i < size(); i++) {
- result.put(keys[i], values.get(i));
- }
-
- return result;
- }
- }
-
- @Override
- public Set<Map.Entry<Integer, T>> entrySet() {
- return toMap().entrySet();
- }
-}
diff --git a/src/main/java/org/antlr/v4/runtime/misc/Interval.java b/src/main/java/org/antlr/v4/runtime/misc/Interval.java
index d64c976..db54eca 100644
--- a/src/main/java/org/antlr/v4/runtime/misc/Interval.java
+++ b/src/main/java/org/antlr/v4/runtime/misc/Interval.java
@@ -11,28 +11,17 @@
/**
* An immutable inclusive interval a..b
+ *
+ * @param a The start of the interval.
+ * @param b The end of the interval (inclusive).
*/
-public class Interval {
+public record Interval(int a, int b) {
public static final int INTERVAL_POOL_MAX_VALUE = 1000;
public static final Interval INVALID = new Interval(-1, -2);
private static final Interval[] cache = new Interval[INTERVAL_POOL_MAX_VALUE + 1];
- /**
- * The start of the interval.
- */
- public final int a;
- /**
- * The end of the interval (inclusive).
- */
- public final int b;
-
- public Interval(int a, int b) {
- this.a = a;
- this.b = b;
- }
-
/**
* Interval objects are used readonly so share all with the
* same single value a==b up to some max size. Use an array as a perfect hash.
@@ -64,20 +53,11 @@ public int length() {
public boolean equals(Object o) {
if (o == this) {
return true;
- } else if (!(o instanceof Interval)) {
+ } else if (!(o instanceof Interval other)) {
return false;
+ } else {
+ return this.a == other.a && this.b == other.b;
}
-
- Interval other = (Interval) o;
- return this.a == other.a && this.b == other.b;
- }
-
- @Override
- public int hashCode() {
- int hash = 23;
- hash = hash * 31 + a;
- hash = hash * 31 + b;
- return hash;
}
/**
@@ -169,6 +149,7 @@ else if (other.startsAfterNonDisjoint(this)) {
}
@Override
+ @NotNull
public String toString() {
return a + ".." + b;
}
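Aside (illustration, not part of the patch): turning Interval into a record is what drives the mechanical .a/.b to a()/b() changes throughout this diff, since record components are exposed through generated accessor methods rather than public final fields. A small before/after sketch at a call site, using the pooled Interval.of factory the class already provides:

    Interval iv = Interval.of(3, 7);
    // before this change: int width = iv.b - iv.a + 1;   // direct field reads on the old class
    int width = iv.b() - iv.a() + 1;                      // record accessors afterwards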
diff --git a/src/main/java/org/antlr/v4/runtime/misc/IntervalSet.java b/src/main/java/org/antlr/v4/runtime/misc/IntervalSet.java
index a7adc5f..fa483cf 100644
--- a/src/main/java/org/antlr/v4/runtime/misc/IntervalSet.java
+++ b/src/main/java/org/antlr/v4/runtime/misc/IntervalSet.java
@@ -120,7 +120,7 @@ public void add(int a, int b) {
// copy on write so we can cache a..a intervals and sets of that
protected void add(Interval addition) {
if (readonly) throw new IllegalStateException("can't alter readonly IntervalSet");
- if (addition.b < addition.a) {
+ if (addition.b() < addition.a()) {
return;
}
// find position in list
@@ -184,7 +184,7 @@ public IntervalSet addAll(IntSet set) {
if (set instanceof IntervalSet other) {
// walk set and add each interval
for (Interval interval : other.intervals) {
- add(interval.a, interval.b);
+ add(interval.a(), interval.b());
}
} else {
for (int value : set.toList()) {
@@ -259,24 +259,24 @@ public static IntervalSet subtract(@Nullable IntervalSet left, @Nullable Interva
// operation: (resultInterval - rightInterval) and update indexes
- if (rightInterval.b < resultInterval.a) {
+ if (rightInterval.b() < resultInterval.a()) {
rightI++;
continue;
}
- if (rightInterval.a > resultInterval.b) {
+ if (rightInterval.a() > resultInterval.b()) {
resultI++;
continue;
}
Interval beforeCurrent = null;
Interval afterCurrent = null;
- if (rightInterval.a > resultInterval.a) {
- beforeCurrent = new Interval(resultInterval.a, rightInterval.a - 1);
+ if (rightInterval.a() > resultInterval.a()) {
+ beforeCurrent = new Interval(resultInterval.a(), rightInterval.a() - 1);
}
- if (rightInterval.b < resultInterval.b) {
- afterCurrent = new Interval(rightInterval.b + 1, resultInterval.b);
+ if (rightInterval.b() < resultInterval.b()) {
+ afterCurrent = new Interval(rightInterval.b() + 1, resultInterval.b());
}
if (beforeCurrent != null) {
@@ -397,8 +397,8 @@ public boolean contains(int el) {
while (l <= r) {
int m = (l + r) / 2;
Interval I = intervals.get(m);
- int a = I.a;
- int b = I.b;
+ int a = I.a();
+ int b = I.b();
if (b < el) {
l = m + 1;
} else if (a > el) {
@@ -425,8 +425,8 @@ public boolean isNil() {
public int getSingleElement() {
if (intervals != null && intervals.size() == 1) {
Interval I = intervals.get(0);
- if (I.a == I.b) {
- return I.a;
+ if (I.a() == I.b()) {
+ return I.a();
}
}
return Token.INVALID_TYPE;
@@ -443,7 +443,7 @@ public int getMaxElement() {
return Token.INVALID_TYPE;
}
Interval last = intervals.get(intervals.size() - 1);
- return last.b;
+ return last.b();
}
/**
@@ -457,7 +457,7 @@ public int getMinElement() {
return Token.INVALID_TYPE;
}
- return intervals.get(0).a;
+ return intervals.get(0).a();
}
/**
@@ -471,8 +471,8 @@ public List getIntervals() {
public int hashCode() {
int hash = MurmurHash.initialize();
for (Interval I : intervals) {
- hash = MurmurHash.update(hash, I.a);
- hash = MurmurHash.update(hash, I.b);
+ hash = MurmurHash.update(hash, I.a());
+ hash = MurmurHash.update(hash, I.b());
}
hash = MurmurHash.finish(hash, intervals.size() * 2);
@@ -509,8 +509,8 @@ public String toString(boolean elemAreChar) {
Iterator<Interval> iter = this.intervals.iterator();
while (iter.hasNext()) {
Interval I = iter.next();
- int a = I.a;
- int b = I.b;
+ int a = I.a();
+ int b = I.b();
if (a == b) {
if (a == Token.EOF) buf.append("<EOF>");
else if (elemAreChar) buf.append("'").appendCodePoint(a).append("'");
@@ -548,8 +548,8 @@ public String toString(@NotNull Vocabulary vocabulary) {
Iterator<Interval> iter = this.intervals.iterator();
while (iter.hasNext()) {
Interval I = iter.next();
- int a = I.a;
- int b = I.b;
+ int a = I.a();
+ int b = I.b();
if (a == b) {
buf.append(elementName(vocabulary, a));
} else {
@@ -592,11 +592,11 @@ public int size() {
int numIntervals = intervals.size();
if (numIntervals == 1) {
Interval firstInterval = intervals.get(0);
- return firstInterval.b - firstInterval.a + 1;
+ return firstInterval.b() - firstInterval.a() + 1;
}
int n = 0;
for (Interval interval : intervals) {
- n += (interval.b - interval.a + 1);
+ n += (interval.b() - interval.a() + 1);
}
return n;
}
@@ -604,8 +604,8 @@ public int size() {
public IntegerList toIntegerList() {
IntegerList values = new IntegerList(size());
for (Interval interval : intervals) {
- int a = interval.a;
- int b = interval.b;
+ int a = interval.a();
+ int b = interval.b();
for (int v = a; v <= b; v++) {
values.add(v);
}
@@ -617,8 +617,8 @@ public IntegerList toIntegerList() {
public List<Integer> toList() {
List<Integer> values = new ArrayList<>(size());
for (Interval interval : intervals) {
- int a = interval.a;
- int b = interval.b;
+ int a = interval.a();
+ int b = interval.b();
for (int v = a; v <= b; v++) {
values.add(v);
}
@@ -629,8 +629,8 @@ public List toList() {
public Set<Integer> toSet() {
Set<Integer> s = new HashSet<>(size());
for (Interval I : intervals) {
- int a = I.a;
- int b = I.b;
+ int a = I.a();
+ int b = I.b();
for (int v = a; v <= b; v++) {
s.add(v);
}
@@ -647,8 +647,8 @@ public void remove(int el) {
if (readonly) throw new IllegalStateException("can't alter readonly IntervalSet");
for (int i = 0, n = intervals.size(); i < n; i++) {
Interval interval = intervals.get(i);
- int a = interval.a;
- int b = interval.b;
+ int a = interval.a();
+ int b = interval.b();
if (el < a) {
break; // list is sorted and el is before this interval; not here
}
@@ -659,18 +659,18 @@ public void remove(int el) {
}
// if on left edge x..b, adjust left
if (el == a) {
- intervals.set(i, Interval.of(interval.a + 1, interval.b));
+ intervals.set(i, Interval.of(interval.a() + 1, interval.b()));
break;
}
// if on right edge a..x, adjust right
if (el == b) {
- intervals.set(i, Interval.of(interval.a, interval.b - 1));
+ intervals.set(i, Interval.of(interval.a(), interval.b() - 1));
break;
}
// if in middle a..x..b, split interval
if (el < b) { // found in this interval
- int oldb = interval.b;
- intervals.set(i, Interval.of(interval.a, el - 1)); // [a..x-1]
+ int oldb = interval.b();
+ intervals.set(i, Interval.of(interval.a(), el - 1)); // [a..x-1]
add(el + 1, oldb); // add [x+1..b]
}
}
diff --git a/src/main/java/org/antlr/v4/runtime/misc/NotNull.java b/src/main/java/org/antlr/v4/runtime/misc/NotNull.java
index 8162880..3b9873c 100644
--- a/src/main/java/org/antlr/v4/runtime/misc/NotNull.java
+++ b/src/main/java/org/antlr/v4/runtime/misc/NotNull.java
@@ -9,6 +9,8 @@
*/
package org.antlr.v4.runtime.misc;
+import javax.annotation.meta.TypeQualifierNickname;
+import javax.annotation.meta.When;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
@@ -47,5 +49,7 @@
@Documented
@Retention(RetentionPolicy.CLASS)
@Target({ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER, ElementType.LOCAL_VARIABLE})
+@javax.annotation.Nonnull(when = When.ALWAYS)
+@TypeQualifierNickname
public @interface NotNull {
}
diff --git a/src/main/java/org/antlr/v4/runtime/misc/Nullable.java b/src/main/java/org/antlr/v4/runtime/misc/Nullable.java
index 8cbea26..dc8b253 100644
--- a/src/main/java/org/antlr/v4/runtime/misc/Nullable.java
+++ b/src/main/java/org/antlr/v4/runtime/misc/Nullable.java
@@ -9,6 +9,8 @@
*/
package org.antlr.v4.runtime.misc;
+import javax.annotation.meta.TypeQualifierNickname;
+import javax.annotation.meta.When;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
@@ -47,5 +49,7 @@
@Documented
@Retention(RetentionPolicy.CLASS)
@Target({ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER, ElementType.LOCAL_VARIABLE})
+@javax.annotation.Nonnull(when = When.UNKNOWN)
+@TypeQualifierNickname
public @interface Nullable {
}
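Aside (illustration, not part of the patch): with @TypeQualifierNickname, the runtime's @NotNull and @Nullable become aliases for javax.annotation.Nonnull(when = ALWAYS) and Nonnull(when = UNKNOWN), so JSR-305-aware analyzers treat annotated elements exactly as if they carried the javax.annotation qualifiers directly. A hedged sketch of a nickname annotation built the same way (the name NeverNull is hypothetical):

    import javax.annotation.Nonnull;
    import javax.annotation.meta.TypeQualifierNickname;
    import javax.annotation.meta.When;
    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;

    @Nonnull(when = When.ALWAYS)   // analyzers read @NeverNull as @Nonnull(ALWAYS)
    @TypeQualifierNickname
    @Retention(RetentionPolicy.CLASS)
    @interface NeverNull {
    }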
diff --git a/src/main/java/org/antlr/v4/runtime/tree/ParseTreeListener.java b/src/main/java/org/antlr/v4/runtime/tree/ParseTreeListener.java
index 9faea37..5b681cb 100644
--- a/src/main/java/org/antlr/v4/runtime/tree/ParseTreeListener.java
+++ b/src/main/java/org/antlr/v4/runtime/tree/ParseTreeListener.java
@@ -22,7 +22,7 @@
* If you want to trigger events in multiple listeners during a single
* tree walk, you can use the ParseTreeDispatcher object available at
*
- * https://github.com/antlr/antlr4/issues/841
+ * ...
*/
public interface ParseTreeListener {
void visitTerminal(@NotNull TerminalNode node);
diff --git a/src/main/java/org/antlr/v4/runtime/tree/Trees.java b/src/main/java/org/antlr/v4/runtime/tree/Trees.java
index c25c741..5489d85 100644
--- a/src/main/java/org/antlr/v4/runtime/tree/Trees.java
+++ b/src/main/java/org/antlr/v4/runtime/tree/Trees.java
@@ -246,7 +246,7 @@ public static void stripChildrenOutOfRange(ParserRuleContext t,
for (int i = 0; i < t.getChildCount(); i++) {
ParseTree child = t.getChild(i);
Interval range = child.getSourceInterval();
- if (child instanceof ParserRuleContext && (range.b < startIndex || range.a > stopIndex)) {
+ if (child instanceof ParserRuleContext && (range.b() < startIndex || range.a() > stopIndex)) {
if (isAncestorOf(child, root)) { // replace only if subtree doesn't have displayed root
CommonToken abbrev = new CommonToken(Token.INVALID_TYPE, "...");
t.children.set(i, new TerminalNodeImpl(abbrev));
diff --git a/src/main/java/org/antlr/v4/runtime/tree/xpath/XPathLexerErrorListener.java b/src/main/java/org/antlr/v4/runtime/tree/xpath/XPathLexerErrorListener.java
index 45ccf89..a7ba2e7 100644
--- a/src/main/java/org/antlr/v4/runtime/tree/xpath/XPathLexerErrorListener.java
+++ b/src/main/java/org/antlr/v4/runtime/tree/xpath/XPathLexerErrorListener.java
@@ -12,11 +12,12 @@
import org.antlr.v4.runtime.ANTLRErrorListener;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
+import org.antlr.v4.runtime.misc.NotNull;
public class XPathLexerErrorListener implements ANTLRErrorListener {
@Override
- public void syntaxError(Recognizer recognizer, T offendingSymbol,
- int line, int charPositionInLine, String msg,
+ public void syntaxError(@NotNull Recognizer recognizer, T offendingSymbol,
+ int line, int charPositionInLine, @NotNull String msg,
RecognitionException e) {
}
}
diff --git a/src/main/java/org/antlr/v4/semantics/AttributeChecks.java b/src/main/java/org/antlr/v4/semantics/AttributeChecks.java
index dfd0eff..e86ed71 100644
--- a/src/main/java/org/antlr/v4/semantics/AttributeChecks.java
+++ b/src/main/java/org/antlr/v4/semantics/AttributeChecks.java
@@ -170,28 +170,12 @@ public void attr(String expr, Token x) {
@Override
public void nonLocalAttr(String expr, Token x, Token y) {
- Rule r = g.getRule(x.getText());
- if (r == null) {
- errMgr.grammarError(ErrorType.UNDEFINED_RULE_IN_NONLOCAL_REF,
- g.fileName, x, x.getText(), y.getText(), expr);
- } else if (r.resolveToAttribute(y.getText(), null) == null) {
- errMgr.grammarError(ErrorType.UNKNOWN_RULE_ATTRIBUTE,
- g.fileName, y, y.getText(), x.getText(), expr);
-
- }
+ nonLocalAttributes(expr, x, y);
}
@Override
public void setNonLocalAttr(String expr, Token x, Token y, Token rhs) {
- Rule r = g.getRule(x.getText());
- if (r == null) {
- errMgr.grammarError(ErrorType.UNDEFINED_RULE_IN_NONLOCAL_REF,
- g.fileName, x, x.getText(), y.getText(), expr);
- } else if (r.resolveToAttribute(y.getText(), null) == null) {
- errMgr.grammarError(ErrorType.UNKNOWN_RULE_ATTRIBUTE,
- g.fileName, y, y.getText(), x.getText(), expr);
-
- }
+ nonLocalAttributes(expr, x, y);
}
@Override
@@ -240,4 +224,14 @@ public Rule isolatedRuleRef(String x) {
return null;
}
+ private void nonLocalAttributes(String expr, Token x, Token y) {
+ var r = g.getRule(x.getText());
+ if (r == null) {
+ errMgr.grammarError(ErrorType.UNDEFINED_RULE_IN_NONLOCAL_REF,
+ g.fileName, x, x.getText(), y.getText(), expr);
+ } else if (r.resolveToAttribute(y.getText(), null) == null) {
+ errMgr.grammarError(ErrorType.UNKNOWN_RULE_ATTRIBUTE,
+ g.fileName, y, y.getText(), x.getText(), expr);
+ }
+ }
}
diff --git a/src/main/java/org/antlr/v4/testgen/STGroupModelAdaptor.java b/src/main/java/org/antlr/v4/testgen/STGroupModelAdaptor.java
index a17370b..4459a32 100644
--- a/src/main/java/org/antlr/v4/testgen/STGroupModelAdaptor.java
+++ b/src/main/java/org/antlr/v4/testgen/STGroupModelAdaptor.java
@@ -21,8 +21,11 @@
public class STGroupModelAdaptor implements ModelAdaptor {
@Override
- public Object getProperty(Interpreter interp, ST self, STGroup o, Object property, String propertyName) throws STNoSuchPropertyException {
- STGroup group = o;
+ public Object getProperty(Interpreter interp,
+ ST self,
+ STGroup group,
+ Object property,
+ String propertyName) throws STNoSuchPropertyException {
if (group.isDictionary(propertyName)) {
return group.rawGetDictionary(propertyName);
}
diff --git a/src/main/java/org/antlr/v4/tool/ErrorType.java b/src/main/java/org/antlr/v4/tool/ErrorType.java
index 69c32e6..5c761a6 100644
--- a/src/main/java/org/antlr/v4/tool/ErrorType.java
+++ b/src/main/java/org/antlr/v4/tool/ErrorType.java
@@ -1230,7 +1230,7 @@ public enum ErrorType {
// Dependency sorting errors
- /** t1.g4 -> t2.g4 -> t3.g4 ->t1.g4 */
+ // t1.g4 -> t2.g4 -> t3.g4 ->t1.g4
//CIRCULAR_DEPENDENCY(200, "your grammars contain a circular dependency and cannot be sorted into a valid build order", ErrorSeverity.ERROR),
;
diff --git a/src/main/java/org/antlr/v4/tool/GrammarParserInterpreter.java b/src/main/java/org/antlr/v4/tool/GrammarParserInterpreter.java
index 804acc4..53c9e42 100644
--- a/src/main/java/org/antlr/v4/tool/GrammarParserInterpreter.java
+++ b/src/main/java/org/antlr/v4/tool/GrammarParserInterpreter.java
@@ -29,6 +29,7 @@
import org.antlr.v4.runtime.atn.RuleStartState;
import org.antlr.v4.runtime.atn.StarLoopEntryState;
import org.antlr.v4.runtime.misc.Interval;
+import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.tree.Trees;
import java.lang.reflect.Constructor;
@@ -352,22 +353,9 @@ public static List getLookaheadParseTrees(Grammar g,
parser.setErrorHandler(errorHandler);
parser.reset();
parser.addDecisionOverride(decision, startIndex, alt);
- ParserRuleContext tt = parser.parse(startRuleIndex);
- int stopTreeAt = stopIndex;
- if (errorHandler.firstErrorTokenIndex >= 0) {
- stopTreeAt = errorHandler.firstErrorTokenIndex; // cut off rest at first error
- }
- Interval overallRange = tt.getSourceInterval();
- if (stopTreeAt > overallRange.b) {
- // If we try to look beyond range of tree, stopTreeAt must be EOF
- // for which there is no EOF ref in grammar. That means tree
- // will not have node for stopTreeAt; limit to overallRange.b
- stopTreeAt = overallRange.b;
- }
- ParserRuleContext subtree =
- Trees.getRootOfSubtreeEnclosingRegion(tt,
- startIndex,
- stopTreeAt);
+ var tt = parser.parse(startRuleIndex);
+ int stopTreeAt = getStopTreeAt(stopIndex, errorHandler, tt);
+ var subtree = Trees.getRootOfSubtreeEnclosingRegion(tt, startIndex, stopTreeAt);
// Use higher of overridden decision tree or tree enclosing all tokens
if (Trees.isAncestorOf(parser.getOverrideDecisionRoot(), subtree)) {
subtree = parser.getOverrideDecisionRoot();
@@ -379,6 +367,21 @@ public static List getLookaheadParseTrees(Grammar g,
return trees;
}
+ private static int getStopTreeAt(int stopIndex, BailButConsumeErrorStrategy errorHandler, ParserRuleContext tt) {
+ int stopTreeAt = stopIndex;
+ if (errorHandler.firstErrorTokenIndex >= 0) {
+ stopTreeAt = errorHandler.firstErrorTokenIndex; // cut off rest at first error
+ }
+ Interval overallRange = tt.getSourceInterval();
+ if (stopTreeAt > overallRange.b()) {
+ // If we try to look beyond range of tree, stopTreeAt must be EOF
+ // for which there is no EOF ref in grammar. That means tree
+ // will not have node for stopTreeAt; limit to overallRange.b
+ stopTreeAt = overallRange.b();
+ }
+ return stopTreeAt;
+ }
+
/**
* Derive a new parser from an old one that has knowledge of the grammar.
* The Grammar object is used to correctly compute outer alternative
@@ -427,20 +430,20 @@ public static class BailButConsumeErrorStrategy extends DefaultErrorStrategy {
public int firstErrorTokenIndex = -1;
@Override
- public void recover(Parser recognizer, RecognitionException e) {
+ public void recover(@NotNull Parser recognizer, @NotNull RecognitionException e) {
int errIndex = recognizer.getInputStream().index();
if (firstErrorTokenIndex == -1) {
firstErrorTokenIndex = errIndex; // latch
}
-// System.err.println("recover: error at " + errIndex);
TokenStream input = recognizer.getInputStream();
if (input.index() < input.size() - 1) { // don't consume() eof
recognizer.consume(); // just kill this bad token and let it continue.
}
}
+ @NotNull
@Override
- public Token recoverInline(Parser recognizer) throws RecognitionException {
+ public Token recoverInline(@NotNull Parser recognizer) throws RecognitionException {
int errIndex = recognizer.getInputStream().index();
if (firstErrorTokenIndex == -1) {
firstErrorTokenIndex = errIndex; // latch
@@ -450,7 +453,7 @@ public Token recoverInline(Parser recognizer) throws RecognitionException {
}
@Override
- public void sync(Parser recognizer) {
+ public void sync(@NotNull Parser recognizer) {
} // don't consume anything; let it fail later
}
}
diff --git a/src/test/java/org/antlr/v4/test/tool/AbstractBaseTest.java b/src/test/java/org/antlr/v4/test/tool/AbstractBaseTest.java
index d554b31..6d58551 100644
--- a/src/test/java/org/antlr/v4/test/tool/AbstractBaseTest.java
+++ b/src/test/java/org/antlr/v4/test/tool/AbstractBaseTest.java
@@ -1008,11 +1008,13 @@ public int size() {
return types.size();
}
+ @NotNull
@Override
public String getSourceName() {
return UNKNOWN_SOURCE_NAME;
}
+ @NotNull
@Override
public Token LT(int i) {
CommonToken t;
@@ -1023,11 +1025,13 @@ public Token LT(int i) {
return t;
}
+ @NotNull
@Override
public Token get(int i) {
return new org.antlr.v4.runtime.CommonToken(types.get(i));
}
+ @NotNull
@Override
public TokenSource getTokenSource() {
return null;
@@ -1041,13 +1045,13 @@ public String getText() {
@NotNull
@Override
- public String getText(Interval interval) {
+ public String getText(@NotNull Interval interval) {
throw new UnsupportedOperationException("can't give strings");
}
@NotNull
@Override
- public String getText(RuleContext ctx) {
+ public String getText(@NotNull RuleContext ctx) {
throw new UnsupportedOperationException("can't give strings");
}
diff --git a/src/test/java/org/antlr/v4/test/tool/JavaUnicodeInputStream.java b/src/test/java/org/antlr/v4/test/tool/JavaUnicodeInputStream.java
index 7157f6b..606353f 100644
--- a/src/test/java/org/antlr/v4/test/tool/JavaUnicodeInputStream.java
+++ b/src/test/java/org/antlr/v4/test/tool/JavaUnicodeInputStream.java
@@ -49,13 +49,15 @@ public int index() {
return source.index();
}
+ @NotNull
@Override
public String getSourceName() {
return source.getSourceName();
}
+ @NotNull
@Override
- public String getText(Interval interval) {
+ public String getText(@NotNull Interval interval) {
return source.getText(interval);
}
diff --git a/src/test/java/org/antlr/v4/test/tool/PerformanceTest.java b/src/test/java/org/antlr/v4/test/tool/PerformanceTest.java
index 6848c67..1756abe 100644
--- a/src/test/java/org/antlr/v4/test/tool/PerformanceTest.java
+++ b/src/test/java/org/antlr/v4/test/tool/PerformanceTest.java
@@ -1590,7 +1590,7 @@ public int adaptivePredict(TokenStream input, int decision, ParserRuleContext ou
}
@Override
- public int adaptivePredict(TokenStream input, int decision, ParserRuleContext outerContext, boolean useContext) {
+ public int adaptivePredict(@NotNull TokenStream input, int decision, ParserRuleContext outerContext, boolean useContext) {
if (useContext) {
fullContextFallback[decision]++;
}
@@ -1604,9 +1604,10 @@ protected DFAState getExistingTargetState(DFAState previousD, int t) {
return super.getExistingTargetState(previousD, t);
}
+ @NotNull
@Override
- protected Pair computeTargetState(DFA dfa,
- DFAState s,
+ protected Pair computeTargetState(@NotNull DFA dfa,
+ @NotNull DFAState s,
ParserRuleContext remainingGlobalContext,
int t,
boolean useContext,
@@ -1631,7 +1632,7 @@ private static class DescriptiveErrorListener extends BaseErrorListener {
public static DescriptiveErrorListener INSTANCE = new DescriptiveErrorListener();
@Override
- public void syntaxError(Recognizer recognizer, T offendingSymbol, int line, int charPositionInLine, String msg, RecognitionException e) {
+ public void syntaxError(@NotNull Recognizer recognizer, T offendingSymbol, int line, int charPositionInLine, @NotNull String msg, RecognitionException e) {
if (!REPORT_SYNTAX_ERRORS) {
return;
}
@@ -1650,7 +1651,7 @@ private static class DescriptiveLexerErrorListener implements ANTLRErrorListener
public static DescriptiveLexerErrorListener INSTANCE = new DescriptiveLexerErrorListener();
@Override
- public void syntaxError(Recognizer recognizer, T offendingSymbol, int line, int charPositionInLine, String msg, RecognitionException e) {
+ public void syntaxError(@NotNull Recognizer recognizer, T offendingSymbol, int line, int charPositionInLine, @NotNull String msg, RecognitionException e) {
if (!REPORT_SYNTAX_ERRORS) {
return;
}
@@ -1958,26 +1959,26 @@ public ChecksumParseTreeListener(MurmurHashChecksum checksum) {
}
@Override
- public void visitTerminal(TerminalNode node) {
+ public void visitTerminal(@NotNull TerminalNode node) {
checksum.update(VISIT_TERMINAL);
updateChecksum(checksum, node.getSymbol());
}
@Override
- public void visitErrorNode(ErrorNode node) {
+ public void visitErrorNode(@NotNull ErrorNode node) {
checksum.update(VISIT_ERROR_NODE);
updateChecksum(checksum, node.getSymbol());
}
@Override
- public void enterEveryRule(ParserRuleContext ctx) {
+ public void enterEveryRule(@NotNull ParserRuleContext ctx) {
checksum.update(ENTER_RULE);
updateChecksum(checksum, ctx.getRuleIndex());
updateChecksum(checksum, ctx.getStart());
}
@Override
- public void exitEveryRule(ParserRuleContext ctx) {
+ public void exitEveryRule(@NotNull ParserRuleContext ctx) {
checksum.update(EXIT_RULE);
updateChecksum(checksum, ctx.getRuleIndex());
updateChecksum(checksum, ctx.getStop());