| comment | method_body | target_code | method_body_after | context_before | context_after |
|---|---|---|---|---|---|
Shall we simplify to `return !flagSet.contains(Flag.PUBLIC);`? | private boolean isIsolationInferableFunction(BLangFunction funcNode) {
Set<Flag> flagSet = funcNode.flagSet;
if (flagSet.contains(Flag.INTERFACE)) {
return false;
}
if (!flagSet.contains(Flag.ATTACHED)) {
if (flagSet.contains(Flag.PUBLIC)) {
return false;
}
return true;
}
BSymbol owner = funcNode.symbol.owner;
if (!Symbols.isFlagOn(owner.flags, Flags.PUBLIC)) {
return true;
}
if (!(owner instanceof BClassSymbol)) {
return false;
}
BClassSymbol ownerClassSymbol = (BClassSymbol) owner;
return ownerClassSymbol.isServiceDecl || Symbols.isFlagOn(ownerClassSymbol.flags, Flags.OBJECT_CTOR);
} | if (flagSet.contains(Flag.PUBLIC)) { | private boolean isIsolationInferableFunction(BLangFunction funcNode) {
Set<Flag> flagSet = funcNode.flagSet;
if (flagSet.contains(Flag.INTERFACE)) {
return false;
}
if (!flagSet.contains(Flag.ATTACHED)) {
return !flagSet.contains(Flag.PUBLIC);
}
BSymbol owner = funcNode.symbol.owner;
if (!Symbols.isFlagOn(owner.flags, Flags.PUBLIC)) {
return true;
}
if (!(owner instanceof BClassSymbol)) {
return false;
}
BClassSymbol ownerClassSymbol = (BClassSymbol) owner;
return ownerClassSymbol.isServiceDecl || Symbols.isFlagOn(ownerClassSymbol.flags, Flags.OBJECT_CTOR);
} | class variable
if (env.node.getKind() != NodeKind.EXPR_FUNCTION_BODY ||
env.enclEnv.node.getKind() != NodeKind.ARROW_EXPR) {
return false;
} | class variable
if (isNotInArrowFunctionBody(env)) {
return false;
} |
Under the hood, `isNotEmpty()` and `isEmpty()` also check for non-null, so these assertions could be joined: ```suggestion assertThat(consumerFunction.getSubscribedPartitionsToStartOffsets()) .isNotEmpty() .isEqualTo(expectedSubscribedPartitionsWithStartOffsets); ``` | public void testRestoreFromEmptyStateWithPartitions() throws Exception {
final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet());
final DummyFlinkKafkaConsumer<String> consumerFunction =
new DummyFlinkKafkaConsumer<>(
TOPICS, partitions, FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED);
StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
new StreamSource<>(consumerFunction);
final AbstractStreamOperatorTestHarness<String> testHarness =
new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);
testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
testHarness.setup();
testHarness.initializeState(
OperatorSnapshotUtil.getResourceFilename(
"kafka-consumer-migration-test-flink"
+ testMigrateVersion
+ "-empty-state-snapshot"));
testHarness.open();
final HashMap<KafkaTopicPartition, Long> expectedSubscribedPartitionsWithStartOffsets =
new HashMap<>();
for (KafkaTopicPartition partition : PARTITION_STATE.keySet()) {
expectedSubscribedPartitionsWithStartOffsets.put(
partition, KafkaTopicPartitionStateSentinel.EARLIEST_OFFSET);
}
assertThat(consumerFunction.getSubscribedPartitionsToStartOffsets()).isNotNull();
assertThat(!consumerFunction.getSubscribedPartitionsToStartOffsets().isEmpty()).isTrue();
assertThat(consumerFunction.getSubscribedPartitionsToStartOffsets())
.isEqualTo(expectedSubscribedPartitionsWithStartOffsets);
assertThat(consumerFunction.getRestoredState()).isNotNull();
assertThat(!consumerFunction.getSubscribedPartitionsToStartOffsets().isEmpty()).isTrue();
for (Map.Entry<KafkaTopicPartition, Long> expectedEntry :
expectedSubscribedPartitionsWithStartOffsets.entrySet()) {
assertThat(consumerFunction.getRestoredState().get(expectedEntry.getKey()))
.isEqualTo(expectedEntry.getValue());
}
consumerOperator.close();
consumerOperator.cancel();
} | assertThat(!consumerFunction.getSubscribedPartitionsToStartOffsets().isEmpty()).isTrue(); | public void testRestoreFromEmptyStateWithPartitions() throws Exception {
final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet());
final DummyFlinkKafkaConsumer<String> consumerFunction =
new DummyFlinkKafkaConsumer<>(
TOPICS, partitions, FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED);
StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
new StreamSource<>(consumerFunction);
final AbstractStreamOperatorTestHarness<String> testHarness =
new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);
testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
testHarness.setup();
testHarness.initializeState(
OperatorSnapshotUtil.getResourceFilename(
"kafka-consumer-migration-test-flink"
+ testMigrateVersion
+ "-empty-state-snapshot"));
testHarness.open();
final HashMap<KafkaTopicPartition, Long> expectedSubscribedPartitionsWithStartOffsets =
new HashMap<>();
for (KafkaTopicPartition partition : PARTITION_STATE.keySet()) {
expectedSubscribedPartitionsWithStartOffsets.put(
partition, KafkaTopicPartitionStateSentinel.EARLIEST_OFFSET);
}
assertThat(consumerFunction.getSubscribedPartitionsToStartOffsets())
.isNotEmpty()
.isEqualTo(expectedSubscribedPartitionsWithStartOffsets);
assertThat(consumerFunction.getRestoredState()).isNotNull();
assertThat(consumerFunction.getSubscribedPartitionsToStartOffsets()).isNotEmpty();
for (Map.Entry<KafkaTopicPartition, Long> expectedEntry :
expectedSubscribedPartitionsWithStartOffsets.entrySet()) {
assertThat(consumerFunction.getRestoredState())
.containsEntry(expectedEntry.getKey(), expectedEntry.getValue());
}
consumerOperator.close();
consumerOperator.cancel();
} | class FlinkKafkaConsumerBaseMigrationTest {
/**
* TODO change this to the corresponding savepoint version to be written (e.g. {@link
* FlinkVersion
* methods to generate savepoints TODO Note: You should generate the savepoint based on the
* release branch instead of the master.
*/
private final FlinkVersion flinkGenerateSavepointVersion = null;
private static final HashMap<KafkaTopicPartition, Long> PARTITION_STATE = new HashMap<>();
static {
PARTITION_STATE.put(new KafkaTopicPartition("abc", 13), 16768L);
PARTITION_STATE.put(new KafkaTopicPartition("def", 7), 987654321L);
}
private static final List<String> TOPICS =
new ArrayList<>(PARTITION_STATE.keySet())
.stream().map(p -> p.getTopic()).distinct().collect(Collectors.toList());
private final FlinkVersion testMigrateVersion;
@Parameterized.Parameters(name = "Migration Savepoint: {0}")
public static Collection<FlinkVersion> parameters() {
return FlinkVersion.rangeOf(FlinkVersion.v1_4, FlinkVersion.v1_15);
}
public FlinkKafkaConsumerBaseMigrationTest(FlinkVersion testMigrateVersion) {
this.testMigrateVersion = testMigrateVersion;
}
/** Manually run this to write binary snapshot data. */
@Ignore
@Test
public void writeSnapshot() throws Exception {
writeSnapshot(
"src/test/resources/kafka-consumer-migration-test-flink"
+ flinkGenerateSavepointVersion
+ "-snapshot",
PARTITION_STATE);
final HashMap<KafkaTopicPartition, Long> emptyState = new HashMap<>();
writeSnapshot(
"src/test/resources/kafka-consumer-migration-test-flink"
+ flinkGenerateSavepointVersion
+ "-empty-state-snapshot",
emptyState);
}
private void writeSnapshot(String path, HashMap<KafkaTopicPartition, Long> state)
throws Exception {
final OneShotLatch latch = new OneShotLatch();
final AbstractFetcher<String, ?> fetcher = mock(AbstractFetcher.class);
doAnswer(
new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
latch.trigger();
return null;
}
})
.when(fetcher)
.runFetchLoop();
when(fetcher.snapshotCurrentState()).thenReturn(state);
final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet());
final DummyFlinkKafkaConsumer<String> consumerFunction =
new DummyFlinkKafkaConsumer<>(
fetcher,
TOPICS,
partitions,
FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED);
StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
new StreamSource<>(consumerFunction);
final AbstractStreamOperatorTestHarness<String> testHarness =
new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);
testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
testHarness.setup();
testHarness.open();
final Throwable[] error = new Throwable[1];
Thread runner =
new Thread() {
@Override
public void run() {
try {
consumerFunction.run(
new DummySourceContext() {
@Override
public void collect(String element) {}
});
} catch (Throwable t) {
t.printStackTrace();
error[0] = t;
}
}
};
runner.start();
if (!latch.isTriggered()) {
latch.await();
}
final OperatorSubtaskState snapshot;
synchronized (testHarness.getCheckpointLock()) {
snapshot = testHarness.snapshot(0L, 0L);
}
OperatorSnapshotUtil.writeStateHandle(snapshot, path);
consumerOperator.close();
runner.join();
}
/** Test restoring from a legacy empty state, when no partitions could be found for topics. */
@Test
public void testRestoreFromEmptyStateNoPartitions() throws Exception {
final DummyFlinkKafkaConsumer<String> consumerFunction =
new DummyFlinkKafkaConsumer<>(
Collections.singletonList("dummy-topic"),
Collections.<KafkaTopicPartition>emptyList(),
FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED);
StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
new StreamSource<>(consumerFunction);
final AbstractStreamOperatorTestHarness<String> testHarness =
new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);
testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
testHarness.setup();
testHarness.initializeState(
OperatorSnapshotUtil.getResourceFilename(
"kafka-consumer-migration-test-flink"
+ testMigrateVersion
+ "-empty-state-snapshot"));
testHarness.open();
assertThat(consumerFunction.getSubscribedPartitionsToStartOffsets()).isNotNull();
assertThat(consumerFunction.getSubscribedPartitionsToStartOffsets()).isEmpty();
assertThat(consumerFunction.getRestoredState()).isEmpty();
consumerOperator.close();
consumerOperator.cancel();
}
/**
* Test restoring from an empty state taken using a previous Flink version, when some partitions
* could be found for topics.
*/
@Test
/**
* Test restoring from a non-empty state taken using a previous Flink version, when some
* partitions could be found for topics.
*/
@Test
public void testRestore() throws Exception {
final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet());
final DummyFlinkKafkaConsumer<String> consumerFunction =
new DummyFlinkKafkaConsumer<>(
TOPICS, partitions, FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED);
StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
new StreamSource<>(consumerFunction);
final AbstractStreamOperatorTestHarness<String> testHarness =
new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);
testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
testHarness.setup();
testHarness.initializeState(
OperatorSnapshotUtil.getResourceFilename(
"kafka-consumer-migration-test-flink" + testMigrateVersion + "-snapshot"));
testHarness.open();
assertThat(consumerFunction.getSubscribedPartitionsToStartOffsets()).isNotNull();
assertThat(!consumerFunction.getSubscribedPartitionsToStartOffsets().isEmpty()).isTrue();
assertThat(consumerFunction.getSubscribedPartitionsToStartOffsets())
.isEqualTo(PARTITION_STATE);
assertThat(consumerFunction.getRestoredState()).isNotNull();
assertThat(consumerFunction.getRestoredState()).isEqualTo(PARTITION_STATE);
consumerOperator.close();
consumerOperator.cancel();
}
private static class DummyFlinkKafkaConsumer<T> extends FlinkKafkaConsumerBase<T> {
private static final long serialVersionUID = 1L;
private final List<KafkaTopicPartition> partitions;
private final AbstractFetcher<T, ?> fetcher;
@SuppressWarnings("unchecked")
DummyFlinkKafkaConsumer(
AbstractFetcher<T, ?> fetcher,
List<String> topics,
List<KafkaTopicPartition> partitions,
long discoveryInterval) {
super(
topics,
null,
(KafkaDeserializationSchema<T>) mock(KafkaDeserializationSchema.class),
discoveryInterval,
false);
this.fetcher = fetcher;
this.partitions = partitions;
}
DummyFlinkKafkaConsumer(
List<String> topics, List<KafkaTopicPartition> partitions, long discoveryInterval) {
this(mock(AbstractFetcher.class), topics, partitions, discoveryInterval);
}
@Override
protected AbstractFetcher<T, ?> createFetcher(
SourceContext<T> sourceContext,
Map<KafkaTopicPartition, Long> thisSubtaskPartitionsWithStartOffsets,
SerializedValue<WatermarkStrategy<T>> watermarkStrategy,
StreamingRuntimeContext runtimeContext,
OffsetCommitMode offsetCommitMode,
MetricGroup consumerMetricGroup,
boolean useMetrics)
throws Exception {
return fetcher;
}
@Override
protected AbstractPartitionDiscoverer createPartitionDiscoverer(
KafkaTopicsDescriptor topicsDescriptor,
int indexOfThisSubtask,
int numParallelSubtasks) {
AbstractPartitionDiscoverer mockPartitionDiscoverer =
mock(AbstractPartitionDiscoverer.class);
try {
when(mockPartitionDiscoverer.discoverPartitions()).thenReturn(partitions);
} catch (Exception e) {
}
when(mockPartitionDiscoverer.setAndCheckDiscoveredPartition(
any(KafkaTopicPartition.class)))
.thenReturn(true);
return mockPartitionDiscoverer;
}
@Override
protected boolean getIsAutoCommitEnabled() {
return false;
}
@Override
protected Map<KafkaTopicPartition, Long> fetchOffsetsWithTimestamp(
Collection<KafkaTopicPartition> partitions, long timestamp) {
throw new UnsupportedOperationException();
}
}
private abstract static class DummySourceContext
implements SourceFunction.SourceContext<String> {
private final Object lock = new Object();
@Override
public void collectWithTimestamp(String element, long timestamp) {}
@Override
public void emitWatermark(Watermark mark) {}
@Override
public Object getCheckpointLock() {
return lock;
}
@Override
public void close() {}
@Override
public void markAsTemporarilyIdle() {}
}
} | class FlinkKafkaConsumerBaseMigrationTest {
/**
* TODO change this to the corresponding savepoint version to be written (e.g. {@link
* FlinkVersion
* methods to generate savepoints TODO Note: You should generate the savepoint based on the
* release branch instead of the master.
*/
private final FlinkVersion flinkGenerateSavepointVersion = null;
private static final HashMap<KafkaTopicPartition, Long> PARTITION_STATE = new HashMap<>();
static {
PARTITION_STATE.put(new KafkaTopicPartition("abc", 13), 16768L);
PARTITION_STATE.put(new KafkaTopicPartition("def", 7), 987654321L);
}
private static final List<String> TOPICS =
new ArrayList<>(PARTITION_STATE.keySet())
.stream().map(p -> p.getTopic()).distinct().collect(Collectors.toList());
private final FlinkVersion testMigrateVersion;
@Parameterized.Parameters(name = "Migration Savepoint: {0}")
public static Collection<FlinkVersion> parameters() {
return FlinkVersion.rangeOf(FlinkVersion.v1_4, FlinkVersion.v1_15);
}
public FlinkKafkaConsumerBaseMigrationTest(FlinkVersion testMigrateVersion) {
this.testMigrateVersion = testMigrateVersion;
}
/** Manually run this to write binary snapshot data. */
@Ignore
@Test
public void writeSnapshot() throws Exception {
writeSnapshot(
"src/test/resources/kafka-consumer-migration-test-flink"
+ flinkGenerateSavepointVersion
+ "-snapshot",
PARTITION_STATE);
final HashMap<KafkaTopicPartition, Long> emptyState = new HashMap<>();
writeSnapshot(
"src/test/resources/kafka-consumer-migration-test-flink"
+ flinkGenerateSavepointVersion
+ "-empty-state-snapshot",
emptyState);
}
private void writeSnapshot(String path, HashMap<KafkaTopicPartition, Long> state)
throws Exception {
final OneShotLatch latch = new OneShotLatch();
final AbstractFetcher<String, ?> fetcher = mock(AbstractFetcher.class);
doAnswer(
new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
latch.trigger();
return null;
}
})
.when(fetcher)
.runFetchLoop();
when(fetcher.snapshotCurrentState()).thenReturn(state);
final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet());
final DummyFlinkKafkaConsumer<String> consumerFunction =
new DummyFlinkKafkaConsumer<>(
fetcher,
TOPICS,
partitions,
FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED);
StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
new StreamSource<>(consumerFunction);
final AbstractStreamOperatorTestHarness<String> testHarness =
new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);
testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
testHarness.setup();
testHarness.open();
final Throwable[] error = new Throwable[1];
Thread runner =
new Thread() {
@Override
public void run() {
try {
consumerFunction.run(
new DummySourceContext() {
@Override
public void collect(String element) {}
});
} catch (Throwable t) {
t.printStackTrace();
error[0] = t;
}
}
};
runner.start();
if (!latch.isTriggered()) {
latch.await();
}
final OperatorSubtaskState snapshot;
synchronized (testHarness.getCheckpointLock()) {
snapshot = testHarness.snapshot(0L, 0L);
}
OperatorSnapshotUtil.writeStateHandle(snapshot, path);
consumerOperator.close();
runner.join();
}
/** Test restoring from a legacy empty state, when no partitions could be found for topics. */
@Test
public void testRestoreFromEmptyStateNoPartitions() throws Exception {
final DummyFlinkKafkaConsumer<String> consumerFunction =
new DummyFlinkKafkaConsumer<>(
Collections.singletonList("dummy-topic"),
Collections.<KafkaTopicPartition>emptyList(),
FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED);
StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
new StreamSource<>(consumerFunction);
final AbstractStreamOperatorTestHarness<String> testHarness =
new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);
testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
testHarness.setup();
testHarness.initializeState(
OperatorSnapshotUtil.getResourceFilename(
"kafka-consumer-migration-test-flink"
+ testMigrateVersion
+ "-empty-state-snapshot"));
testHarness.open();
assertThat(consumerFunction.getSubscribedPartitionsToStartOffsets()).isEmpty();
assertThat(consumerFunction.getRestoredState()).isEmpty();
consumerOperator.close();
consumerOperator.cancel();
}
/**
* Test restoring from an empty state taken using a previous Flink version, when some partitions
* could be found for topics.
*/
@Test
/**
* Test restoring from a non-empty state taken using a previous Flink version, when some
* partitions could be found for topics.
*/
@Test
public void testRestore() throws Exception {
final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet());
final DummyFlinkKafkaConsumer<String> consumerFunction =
new DummyFlinkKafkaConsumer<>(
TOPICS, partitions, FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED);
StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
new StreamSource<>(consumerFunction);
final AbstractStreamOperatorTestHarness<String> testHarness =
new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);
testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);
testHarness.setup();
testHarness.initializeState(
OperatorSnapshotUtil.getResourceFilename(
"kafka-consumer-migration-test-flink" + testMigrateVersion + "-snapshot"));
testHarness.open();
assertThat(consumerFunction.getSubscribedPartitionsToStartOffsets())
.isNotEmpty()
.isEqualTo(PARTITION_STATE);
assertThat(consumerFunction.getRestoredState()).isNotNull().isEqualTo(PARTITION_STATE);
consumerOperator.close();
consumerOperator.cancel();
}
private static class DummyFlinkKafkaConsumer<T> extends FlinkKafkaConsumerBase<T> {
private static final long serialVersionUID = 1L;
private final List<KafkaTopicPartition> partitions;
private final AbstractFetcher<T, ?> fetcher;
@SuppressWarnings("unchecked")
DummyFlinkKafkaConsumer(
AbstractFetcher<T, ?> fetcher,
List<String> topics,
List<KafkaTopicPartition> partitions,
long discoveryInterval) {
super(
topics,
null,
(KafkaDeserializationSchema<T>) mock(KafkaDeserializationSchema.class),
discoveryInterval,
false);
this.fetcher = fetcher;
this.partitions = partitions;
}
DummyFlinkKafkaConsumer(
List<String> topics, List<KafkaTopicPartition> partitions, long discoveryInterval) {
this(mock(AbstractFetcher.class), topics, partitions, discoveryInterval);
}
@Override
protected AbstractFetcher<T, ?> createFetcher(
SourceContext<T> sourceContext,
Map<KafkaTopicPartition, Long> thisSubtaskPartitionsWithStartOffsets,
SerializedValue<WatermarkStrategy<T>> watermarkStrategy,
StreamingRuntimeContext runtimeContext,
OffsetCommitMode offsetCommitMode,
MetricGroup consumerMetricGroup,
boolean useMetrics)
throws Exception {
return fetcher;
}
@Override
protected AbstractPartitionDiscoverer createPartitionDiscoverer(
KafkaTopicsDescriptor topicsDescriptor,
int indexOfThisSubtask,
int numParallelSubtasks) {
AbstractPartitionDiscoverer mockPartitionDiscoverer =
mock(AbstractPartitionDiscoverer.class);
try {
when(mockPartitionDiscoverer.discoverPartitions()).thenReturn(partitions);
} catch (Exception e) {
}
when(mockPartitionDiscoverer.setAndCheckDiscoveredPartition(
any(KafkaTopicPartition.class)))
.thenReturn(true);
return mockPartitionDiscoverer;
}
@Override
protected boolean getIsAutoCommitEnabled() {
return false;
}
@Override
protected Map<KafkaTopicPartition, Long> fetchOffsetsWithTimestamp(
Collection<KafkaTopicPartition> partitions, long timestamp) {
throw new UnsupportedOperationException();
}
}
private abstract static class DummySourceContext
implements SourceFunction.SourceContext<String> {
private final Object lock = new Object();
@Override
public void collectWithTimestamp(String element, long timestamp) {}
@Override
public void emitWatermark(Watermark mark) {}
@Override
public Object getCheckpointLock() {
return lock;
}
@Override
public void close() {}
@Override
public void markAsTemporarilyIdle() {}
}
} |
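For reference, a minimal standalone sketch (not part of the Flink test above) illustrating the reviewer's point: AssertJ's `isEmpty()`/`isNotEmpty()` already fail with a null-related message when the actual value is null, so a separate `isNotNull()` adds nothing and the calls can be chained on a single `assertThat`.

```java
import static org.assertj.core.api.Assertions.assertThat;

import java.util.HashMap;
import java.util.Map;

class AssertJChainingSketch {

    static void demo() {
        Map<String, Long> actual = new HashMap<>();
        actual.put("abc", 16768L);
        Map<String, Long> expected = new HashMap<>(actual);

        // If 'actual' were null, isNotEmpty() would already fail with
        // "Expecting actual not to be null", so a preceding isNotNull() is redundant.
        assertThat(actual)
                .isNotEmpty()
                .isEqualTo(expected);
    }
}
```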
This test was for some removed logic in TimeUtils. Currently this case can cause an `ArithmeticException` to be thrown from `java.time.Duration`. But I think we do not need it, just as we do not need to add tests for format validation, which should be tested by `java.time.Duration` itself. | public void testParseDurationInvalid() {
try {
TimeUtils.parseDuration(null);
fail("exception expected");
} catch (NullPointerException ignored) {
}
try {
TimeUtils.parseDuration("");
fail("exception expected");
} catch (IllegalArgumentException ignored) {
}
try {
TimeUtils.parseDuration(" ");
fail("exception expected");
} catch (IllegalArgumentException ignored) {
}
try {
TimeUtils.parseDuration("foobar or fubar or foo bazz");
fail("exception expected");
} catch (IllegalArgumentException ignored) {
}
try {
TimeUtils.parseDuration("16 gjah");
fail("exception expected");
} catch (IllegalArgumentException ignored) {
}
try {
TimeUtils.parseDuration("16 16 17 18 ms");
fail("exception expected");
} catch (IllegalArgumentException ignored) {
}
try {
TimeUtils.parseDuration("-100 ms");
fail("exception expected");
} catch (IllegalArgumentException ignored) {
}
} | } catch (IllegalArgumentException ignored) { | public void testParseDurationInvalid() {
try {
TimeUtils.parseDuration(null);
fail("exception expected");
} catch (NullPointerException ignored) {
}
try {
TimeUtils.parseDuration("");
fail("exception expected");
} catch (IllegalArgumentException ignored) {
}
try {
TimeUtils.parseDuration(" ");
fail("exception expected");
} catch (IllegalArgumentException ignored) {
}
try {
TimeUtils.parseDuration("foobar or fubar or foo bazz");
fail("exception expected");
} catch (IllegalArgumentException ignored) {
}
try {
TimeUtils.parseDuration("16 gjah");
fail("exception expected");
} catch (IllegalArgumentException ignored) {
}
try {
TimeUtils.parseDuration("16 16 17 18 ms");
fail("exception expected");
} catch (IllegalArgumentException ignored) {
}
try {
TimeUtils.parseDuration("-100 ms");
fail("exception expected");
} catch (IllegalArgumentException ignored) {
}
} | class TimeUtilsTest {
@Test
public void testParseDurationNanos() {
assertEquals(424562, TimeUtils.parseDuration("424562ns").getNano());
assertEquals(424562, TimeUtils.parseDuration("424562nano").getNano());
assertEquals(424562, TimeUtils.parseDuration("424562nanosecond").getNano());
assertEquals(424562, TimeUtils.parseDuration("424562 ns").getNano());
}
@Test
public void testParseDurationMicros() {
assertEquals(565731 * 1000L, TimeUtils.parseDuration("565731µs").getNano());
assertEquals(565731 * 1000L, TimeUtils.parseDuration("565731micro").getNano());
assertEquals(565731 * 1000L, TimeUtils.parseDuration("565731microsecond").getNano());
assertEquals(565731 * 1000L, TimeUtils.parseDuration("565731 µs").getNano());
}
@Test
public void testParseDurationMillis() {
assertEquals(1234, TimeUtils.parseDuration("1234").toMillis());
assertEquals(1234, TimeUtils.parseDuration("1234ms").toMillis());
assertEquals(1234, TimeUtils.parseDuration("1234milli").toMillis());
assertEquals(1234, TimeUtils.parseDuration("1234millisecond").toMillis());
assertEquals(1234, TimeUtils.parseDuration("1234 ms").toMillis());
}
@Test
public void testParseDurationSeconds() {
assertEquals(667766, TimeUtils.parseDuration("667766s").getSeconds());
assertEquals(667766, TimeUtils.parseDuration("667766second").getSeconds());
assertEquals(667766, TimeUtils.parseDuration("667766 s").getSeconds());
}
@Test
public void testParseDurationMinutes() {
assertEquals(7657623, TimeUtils.parseDuration("7657623min").toMinutes());
assertEquals(7657623, TimeUtils.parseDuration("7657623minute").toMinutes());
assertEquals(7657623, TimeUtils.parseDuration("7657623 min").toMinutes());
}
@Test
public void testParseDurationHours() {
assertEquals(987654, TimeUtils.parseDuration("987654h").toHours());
assertEquals(987654, TimeUtils.parseDuration("987654hour").toHours());
assertEquals(987654, TimeUtils.parseDuration("987654 h").toHours());
}
@Test
public void testParseDurationDays() {
assertEquals(987654, TimeUtils.parseDuration("987654d").toDays());
assertEquals(987654, TimeUtils.parseDuration("987654day").toDays());
assertEquals(987654, TimeUtils.parseDuration("987654 d").toDays());
}
@Test
public void testParseDurationUpperCase() {
assertEquals(1L, TimeUtils.parseDuration("1 NS").toNanos());
assertEquals(1000L, TimeUtils.parseDuration("1 MICRO").toNanos());
assertEquals(1L, TimeUtils.parseDuration("1 MS").toMillis());
assertEquals(1L, TimeUtils.parseDuration("1 S").getSeconds());
assertEquals(1L, TimeUtils.parseDuration("1 MIN").toMinutes());
assertEquals(1L, TimeUtils.parseDuration("1 H").toHours());
assertEquals(1L, TimeUtils.parseDuration("1 D").toDays());
}
@Test
public void testParseDurationTrim() {
assertEquals(155L, TimeUtils.parseDuration(" 155 ").toMillis());
assertEquals(155L, TimeUtils.parseDuration(" 155 ms ").toMillis());
}
@Test
@Test(expected = IllegalArgumentException.class)
public void testParseDurationNumberOverflow() {
TimeUtils.parseDuration("100000000000000000000000000000000 ms");
}
} | class TimeUtilsTest {
@Test
public void testParseDurationNanos() {
assertEquals(424562, TimeUtils.parseDuration("424562ns").getNano());
assertEquals(424562, TimeUtils.parseDuration("424562nano").getNano());
assertEquals(424562, TimeUtils.parseDuration("424562nanosecond").getNano());
assertEquals(424562, TimeUtils.parseDuration("424562 ns").getNano());
}
@Test
public void testParseDurationMicros() {
assertEquals(565731 * 1000L, TimeUtils.parseDuration("565731µs").getNano());
assertEquals(565731 * 1000L, TimeUtils.parseDuration("565731micro").getNano());
assertEquals(565731 * 1000L, TimeUtils.parseDuration("565731microsecond").getNano());
assertEquals(565731 * 1000L, TimeUtils.parseDuration("565731 µs").getNano());
}
@Test
public void testParseDurationMillis() {
assertEquals(1234, TimeUtils.parseDuration("1234").toMillis());
assertEquals(1234, TimeUtils.parseDuration("1234ms").toMillis());
assertEquals(1234, TimeUtils.parseDuration("1234milli").toMillis());
assertEquals(1234, TimeUtils.parseDuration("1234millisecond").toMillis());
assertEquals(1234, TimeUtils.parseDuration("1234 ms").toMillis());
}
@Test
public void testParseDurationSeconds() {
assertEquals(667766, TimeUtils.parseDuration("667766s").getSeconds());
assertEquals(667766, TimeUtils.parseDuration("667766second").getSeconds());
assertEquals(667766, TimeUtils.parseDuration("667766 s").getSeconds());
}
@Test
public void testParseDurationMinutes() {
assertEquals(7657623, TimeUtils.parseDuration("7657623min").toMinutes());
assertEquals(7657623, TimeUtils.parseDuration("7657623minute").toMinutes());
assertEquals(7657623, TimeUtils.parseDuration("7657623 min").toMinutes());
}
@Test
public void testParseDurationHours() {
assertEquals(987654, TimeUtils.parseDuration("987654h").toHours());
assertEquals(987654, TimeUtils.parseDuration("987654hour").toHours());
assertEquals(987654, TimeUtils.parseDuration("987654 h").toHours());
}
@Test
public void testParseDurationDays() {
assertEquals(987654, TimeUtils.parseDuration("987654d").toDays());
assertEquals(987654, TimeUtils.parseDuration("987654day").toDays());
assertEquals(987654, TimeUtils.parseDuration("987654 d").toDays());
}
@Test
public void testParseDurationUpperCase() {
assertEquals(1L, TimeUtils.parseDuration("1 NS").toNanos());
assertEquals(1000L, TimeUtils.parseDuration("1 MICRO").toNanos());
assertEquals(1L, TimeUtils.parseDuration("1 MS").toMillis());
assertEquals(1L, TimeUtils.parseDuration("1 S").getSeconds());
assertEquals(1L, TimeUtils.parseDuration("1 MIN").toMinutes());
assertEquals(1L, TimeUtils.parseDuration("1 H").toHours());
assertEquals(1L, TimeUtils.parseDuration("1 D").toDays());
}
@Test
public void testParseDurationTrim() {
assertEquals(155L, TimeUtils.parseDuration(" 155 ").toMillis());
assertEquals(155L, TimeUtils.parseDuration(" 155 ms ").toMillis());
}
@Test
@Test(expected = IllegalArgumentException.class)
public void testParseDurationNumberOverflow() {
TimeUtils.parseDuration("100000000000000000000000000000000 ms");
}
} |
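As a side note to the comment above, here is a minimal standalone sketch (not from the Flink codebase) showing that `java.time.Duration` itself reports numeric overflow with an `ArithmeticException`, which is why a dedicated overflow test in `TimeUtilsTest` adds little value; the try/fail/catch style mirrors the JUnit 4 tests above.

```java
import static org.junit.Assert.fail;

import java.time.Duration;
import org.junit.Test;

public class DurationOverflowSketchTest {

    @Test
    public void durationArithmeticSignalsOverflow() {
        try {
            // Math.multiplyExact inside Duration.ofDays overflows the seconds field.
            Duration.ofDays(Long.MAX_VALUE);
            fail("exception expected");
        } catch (ArithmeticException ignored) {
        }
        try {
            // Math.addExact inside Duration.plusSeconds overflows as well.
            Duration.ofSeconds(Long.MAX_VALUE).plusSeconds(1);
            fail("exception expected");
        } catch (ArithmeticException ignored) {
        }
    }
}
```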
Can't. It takes an Action, which is not compatible with Consumer. | private void registerTasks(Project project) {
TaskContainer tasks = project.getTasks();
tasks.create(LIST_EXTENSIONS_TASK_NAME, QuarkusListExtensions.class);
tasks.create(ADD_EXTENSION_TASK_NAME, QuarkusAddExtension.class);
tasks.create(GENERATE_CONFIG_TASK_NAME, QuarkusGenerateConfig.class);
Task quarkusBuild = tasks.create(QUARKUS_BUILD_TASK_NAME, QuarkusBuild.class);
Task quarkusDev = tasks.create(QUARKUS_DEV_TASK_NAME, QuarkusDev.class);
Task buildNative = tasks.create(BUILD_NATIVE_TASK_NAME, QuarkusNative.class);
Task quarkusTestConfig = tasks.create(QUARKUS_TEST_CONFIG_TASK_NAME, QuarkusTestConfig.class);
project.getPlugins().withType(
BasePlugin.class,
basePlugin -> tasks.getByName(BasePlugin.ASSEMBLE_TASK_NAME).dependsOn(quarkusBuild));
project.getPlugins().withType(
JavaPlugin.class,
javaPlugin -> {
Task classesTask = tasks.getByName(JavaPlugin.CLASSES_TASK_NAME);
quarkusDev.dependsOn(classesTask);
quarkusBuild.dependsOn(classesTask, tasks.getByName(JavaPlugin.JAR_TASK_NAME));
quarkusTestConfig.dependsOn(classesTask);
buildNative.dependsOn(tasks.getByName(BasePlugin.ASSEMBLE_TASK_NAME));
SourceSetContainer sourceSets = project.getConvention().getPlugin(JavaPluginConvention.class)
.getSourceSets();
SourceSet nativeTestSourceSet = sourceSets.create(NATIVE_TEST_SOURCE_SET_NAME);
SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME);
SourceSet testSourceSet = sourceSets.getByName(SourceSet.TEST_SOURCE_SET_NAME);
nativeTestSourceSet.setCompileClasspath(
nativeTestSourceSet.getCompileClasspath()
.plus(mainSourceSet.getOutput())
.plus(testSourceSet.getOutput()));
nativeTestSourceSet.setRuntimeClasspath(
nativeTestSourceSet.getRuntimeClasspath()
.plus(mainSourceSet.getOutput())
.plus(testSourceSet.getOutput()));
ConfigurationContainer configurations = project.getConfigurations();
configurations.maybeCreate(NATIVE_TEST_IMPLEMENTATION_CONFIGURATION_NAME)
.extendsFrom(configurations.findByName(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME));
configurations.maybeCreate(NATIVE_TEST_RUNTIME_ONLY_CONFIGURATION_NAME)
.extendsFrom(configurations.findByName(JavaPlugin.TEST_RUNTIME_ONLY_CONFIGURATION_NAME));
Task testNative = tasks.create(TEST_NATIVE_TASK_NAME, QuarkusTestNative.class);
testNative.dependsOn(buildNative);
testNative.setShouldRunAfter(Collections.singletonList(tasks.findByName(JavaPlugin.TEST_TASK_NAME)));
Consumer<Test> configureTestTask = t -> {
t.dependsOn(quarkusTestConfig);
t.useJUnitPlatform();
};
tasks.withType(Test.class).forEach(configureTestTask);
tasks.withType(Test.class).whenTaskAdded(t -> configureTestTask.accept(t));
});
} | tasks.withType(Test.class).whenTaskAdded(t -> configureTestTask.accept(t)); | private void registerTasks(Project project) {
TaskContainer tasks = project.getTasks();
tasks.create(LIST_EXTENSIONS_TASK_NAME, QuarkusListExtensions.class);
tasks.create(ADD_EXTENSION_TASK_NAME, QuarkusAddExtension.class);
tasks.create(GENERATE_CONFIG_TASK_NAME, QuarkusGenerateConfig.class);
Task quarkusBuild = tasks.create(QUARKUS_BUILD_TASK_NAME, QuarkusBuild.class);
Task quarkusDev = tasks.create(QUARKUS_DEV_TASK_NAME, QuarkusDev.class);
Task buildNative = tasks.create(BUILD_NATIVE_TASK_NAME, QuarkusNative.class);
Task quarkusTestConfig = tasks.create(QUARKUS_TEST_CONFIG_TASK_NAME, QuarkusTestConfig.class);
project.getPlugins().withType(
BasePlugin.class,
basePlugin -> tasks.getByName(BasePlugin.ASSEMBLE_TASK_NAME).dependsOn(quarkusBuild));
project.getPlugins().withType(
JavaPlugin.class,
javaPlugin -> {
Task classesTask = tasks.getByName(JavaPlugin.CLASSES_TASK_NAME);
quarkusDev.dependsOn(classesTask);
quarkusBuild.dependsOn(classesTask, tasks.getByName(JavaPlugin.JAR_TASK_NAME));
quarkusTestConfig.dependsOn(classesTask);
buildNative.dependsOn(tasks.getByName(BasePlugin.ASSEMBLE_TASK_NAME));
SourceSetContainer sourceSets = project.getConvention().getPlugin(JavaPluginConvention.class)
.getSourceSets();
SourceSet nativeTestSourceSet = sourceSets.create(NATIVE_TEST_SOURCE_SET_NAME);
SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME);
SourceSet testSourceSet = sourceSets.getByName(SourceSet.TEST_SOURCE_SET_NAME);
nativeTestSourceSet.setCompileClasspath(
nativeTestSourceSet.getCompileClasspath()
.plus(mainSourceSet.getOutput())
.plus(testSourceSet.getOutput()));
nativeTestSourceSet.setRuntimeClasspath(
nativeTestSourceSet.getRuntimeClasspath()
.plus(mainSourceSet.getOutput())
.plus(testSourceSet.getOutput()));
ConfigurationContainer configurations = project.getConfigurations();
configurations.maybeCreate(NATIVE_TEST_IMPLEMENTATION_CONFIGURATION_NAME)
.extendsFrom(configurations.findByName(JavaPlugin.TEST_IMPLEMENTATION_CONFIGURATION_NAME));
configurations.maybeCreate(NATIVE_TEST_RUNTIME_ONLY_CONFIGURATION_NAME)
.extendsFrom(configurations.findByName(JavaPlugin.TEST_RUNTIME_ONLY_CONFIGURATION_NAME));
Task testNative = tasks.create(TEST_NATIVE_TASK_NAME, QuarkusTestNative.class);
testNative.dependsOn(buildNative);
testNative.setShouldRunAfter(Collections.singletonList(tasks.findByName(JavaPlugin.TEST_TASK_NAME)));
Consumer<Test> configureTestTask = t -> {
t.dependsOn(quarkusTestConfig);
t.useJUnitPlatform();
};
tasks.withType(Test.class).forEach(configureTestTask);
tasks.withType(Test.class).whenTaskAdded(t -> configureTestTask.accept(t));
});
} | class QuarkusPlugin implements Plugin<Project> {
public static final String ID = "io.quarkus";
public static final String EXTENSION_NAME = "quarkus";
public static final String LIST_EXTENSIONS_TASK_NAME = "listExtensions";
public static final String ADD_EXTENSION_TASK_NAME = "addExtension";
public static final String QUARKUS_BUILD_TASK_NAME = "quarkusBuild";
public static final String GENERATE_CONFIG_TASK_NAME = "generateConfig";
public static final String QUARKUS_DEV_TASK_NAME = "quarkusDev";
public static final String BUILD_NATIVE_TASK_NAME = "buildNative";
public static final String TEST_NATIVE_TASK_NAME = "testNative";
public static final String QUARKUS_TEST_CONFIG_TASK_NAME = "quarkusTestConfig";
public static final String NATIVE_TEST_SOURCE_SET_NAME = "native-test";
public static final String NATIVE_TEST_IMPLEMENTATION_CONFIGURATION_NAME = "nativeTestImplementation";
public static final String NATIVE_TEST_RUNTIME_ONLY_CONFIGURATION_NAME = "nativeTestRuntimeOnly";
@Override
public void apply(Project project) {
verifyGradleVersion();
project.getExtensions().create(EXTENSION_NAME, QuarkusPluginExtension.class, project);
registerTasks(project);
}
private void verifyGradleVersion() {
if (GradleVersion.current().compareTo(GradleVersion.version("5.0")) < 0) {
throw new GradleException("Quarkus plugin requires Gradle 5.0 or later. Current version is: " +
GradleVersion.current());
}
}
} | class QuarkusPlugin implements Plugin<Project> {
public static final String ID = "io.quarkus";
public static final String EXTENSION_NAME = "quarkus";
public static final String LIST_EXTENSIONS_TASK_NAME = "listExtensions";
public static final String ADD_EXTENSION_TASK_NAME = "addExtension";
public static final String QUARKUS_BUILD_TASK_NAME = "quarkusBuild";
public static final String GENERATE_CONFIG_TASK_NAME = "generateConfig";
public static final String QUARKUS_DEV_TASK_NAME = "quarkusDev";
public static final String BUILD_NATIVE_TASK_NAME = "buildNative";
public static final String TEST_NATIVE_TASK_NAME = "testNative";
public static final String QUARKUS_TEST_CONFIG_TASK_NAME = "quarkusTestConfig";
public static final String NATIVE_TEST_SOURCE_SET_NAME = "native-test";
public static final String NATIVE_TEST_IMPLEMENTATION_CONFIGURATION_NAME = "nativeTestImplementation";
public static final String NATIVE_TEST_RUNTIME_ONLY_CONFIGURATION_NAME = "nativeTestRuntimeOnly";
@Override
public void apply(Project project) {
verifyGradleVersion();
project.getExtensions().create(EXTENSION_NAME, QuarkusPluginExtension.class, project);
registerTasks(project);
}
private void verifyGradleVersion() {
if (GradleVersion.current().compareTo(GradleVersion.version("5.0")) < 0) {
throw new GradleException("Quarkus plugin requires Gradle 5.0 or later. Current version is: " +
GradleVersion.current());
}
}
} |
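For illustration of the reviewer's reply only (names and structure are my own, not part of the PR): Gradle's `whenTaskAdded` and `withType(Class, Action)` expect an `org.gradle.api.Action`, so a `java.util.function.Consumer` instance cannot be passed as-is, but a lambda or method reference matching `Action`'s single `execute(T)` method adapts it, which is what the plugin does above.

```java
import java.util.function.Consumer;
import org.gradle.api.Project;
import org.gradle.api.tasks.testing.Test;

class TestTaskConfigSketch {

    // Hypothetical helper showing the Consumer-to-Action adaptation.
    static void configureAllTestTasks(Project project, Consumer<Test> configureTestTask) {
        // Already-registered Test tasks: Iterable.forEach takes a Consumer directly.
        project.getTasks().withType(Test.class).forEach(configureTestTask);
        // Tasks added later: whenTaskAdded takes an Action, so the Consumer is adapted
        // with a method reference; passing configureTestTask itself would not compile.
        project.getTasks().withType(Test.class).whenTaskAdded(configureTestTask::accept);
    }
}
```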
This could be logged at a level other than warn, depending on the severity of this situation: LOG.warn("Failed to get WAL queue size, backend id: {}, status: {}", backend.getId(), response.getStatus()); Also consider using exponential backoff or a configurable delay before retrying. | public boolean isPreviousWalFinished(long tableId, long endTransactionId, List<Long> aliveBeIds) {
boolean empty = true;
for (int i = 0; i < aliveBeIds.size(); i++) {
Backend backend = Env.getCurrentSystemInfo().getBackend(aliveBeIds.get(i));
PGetWalQueueSizeRequest request = PGetWalQueueSizeRequest.newBuilder()
.setTableId(tableId)
.setTxnId(endTransactionId)
.build();
PGetWalQueueSizeResponse response = null;
long start = System.currentTimeMillis();
boolean done = false;
long size = 0;
while (!done && System.currentTimeMillis() - start <= Config.check_wal_queue_timeout_threshold) {
try {
Future<PGetWalQueueSizeResponse> future = BackendServiceProxy.getInstance()
.getWalQueueSize(new TNetworkAddress(backend.getHost(), backend.getBrpcPort()), request);
response = future.get();
} catch (Exception e) {
LOG.warn("encounter exception while getting wal queue size on backend id: " + backend.getId()
+ ",exception:" + e);
try {
Thread.sleep(100);
} catch (InterruptedException ie) {
LOG.info("group commit manager sleep wait InterruptedException: ", ie);
}
continue;
}
TStatusCode code = TStatusCode.findByValue(response.getStatus().getStatusCode());
if (code != TStatusCode.OK) {
String msg = "get wal queue size fail,backend id: " + backend.getId() + ", status: "
+ response.getStatus();
LOG.warn(msg);
try {
Thread.sleep(100);
} catch (InterruptedException ie) {
LOG.info("group commit manager sleep wait InterruptedException: ", ie);
}
continue;
}
size = response.getSize();
done = true;
}
if (size > 0) {
LOG.info("backend id:" + backend.getId() + ",wal size:" + size);
empty = false;
}
}
return empty;
} | continue; | public boolean isPreviousWalFinished(long tableId, long endTransactionId, List<Long> aliveBeIds) {
boolean empty = true;
for (int i = 0; i < aliveBeIds.size(); i++) {
Backend backend = Env.getCurrentSystemInfo().getBackend(aliveBeIds.get(i));
if (backend.getBrpcPort() < 0) {
return true;
}
PGetWalQueueSizeRequest request = PGetWalQueueSizeRequest.newBuilder()
.setTableId(tableId)
.setTxnId(endTransactionId)
.build();
long size = getWallQueueSize(backend, request);
if (size > 0) {
LOG.info("backend id:" + backend.getId() + ",wal size:" + size);
empty = false;
}
}
return empty;
} | class GroupCommitManager {
public enum SchemaChangeStatus {
BLOCK, NORMAL
}
private static final Logger LOG = LogManager.getLogger(GroupCommitManager.class);
private Map<Long, SchemaChangeStatus> statusMap = new HashMap<>();
public synchronized boolean isBlock(long tableId) {
if (statusMap.containsKey(tableId)) {
return statusMap.get(tableId) == SchemaChangeStatus.BLOCK;
}
return false;
}
public synchronized void setStatus(long tableId, SchemaChangeStatus status) {
statusMap.put(tableId, status);
}
/**
* Check the wal before the endTransactionId is finished or not.
*/
public long getAllWalQueueSize(Backend backend) {
PGetWalQueueSizeRequest request = PGetWalQueueSizeRequest.newBuilder()
.build();
PGetWalQueueSizeResponse response = null;
long start = System.currentTimeMillis();
boolean done = false;
long size = 0;
while (!done && System.currentTimeMillis() - start <= Config.check_wal_queue_timeout_threshold) {
try {
Future<PGetWalQueueSizeResponse> future = BackendServiceProxy.getInstance()
.getAllWalQueueSize(new TNetworkAddress(backend.getHost(), backend.getBrpcPort()), request);
response = future.get();
} catch (Exception e) {
LOG.warn("encounter exception while getting all wal queue size on backend id: " + backend.getId()
+ ",exception:" + e);
try {
Thread.sleep(100);
} catch (InterruptedException ie) {
LOG.info("group commit manager sleep wait InterruptedException: ", ie);
}
continue;
}
TStatusCode code = TStatusCode.findByValue(response.getStatus().getStatusCode());
if (code != TStatusCode.OK) {
String msg = "get all wal queue size fail,backend id: " + backend.getId() + ", status: "
+ response.getStatus();
LOG.warn(msg);
try {
Thread.sleep(100);
} catch (InterruptedException ie) {
LOG.info("group commit manager sleep wait InterruptedException: ", ie);
}
continue;
}
size = response.getSize();
done = true;
}
if (size > 0) {
LOG.info("backend id:" + backend.getId() + ",all wal size:" + size);
}
return size;
}
public boolean needRecovery(long dbId, long transactionId) {
TransactionState state = Env.getCurrentGlobalTransactionMgr()
.getTransactionState(dbId, transactionId);
if (state != null && state.getTransactionStatus() == TransactionStatus.COMMITTED
|| state.getTransactionStatus() == TransactionStatus.VISIBLE) {
LOG.info("txn {} state is {}, "
+ "skip recovery", transactionId, state.getTransactionStatus());
return false;
} else {
LOG.info("txn {} state is {} ,need recovery", transactionId,
state == null ? "null" : state.getTransactionStatus());
return true;
}
}
} | class GroupCommitManager {
public enum SchemaChangeStatus {
BLOCK, NORMAL
}
private static final Logger LOG = LogManager.getLogger(GroupCommitManager.class);
private final Map<Long, SchemaChangeStatus> statusMap = new ConcurrentHashMap<>();
public boolean isBlock(long tableId) {
if (statusMap.containsKey(tableId)) {
return statusMap.get(tableId) == SchemaChangeStatus.BLOCK;
}
return false;
}
public void setStatus(long tableId, SchemaChangeStatus status) {
LOG.debug("Setting status for tableId {}: {}", tableId, status);
statusMap.put(tableId, status);
}
/**
* Check the wal before the endTransactionId is finished or not.
*/
public long getAllWalQueueSize(Backend backend) {
PGetWalQueueSizeRequest request = PGetWalQueueSizeRequest.newBuilder()
.setTableId(-1)
.setTxnId(-1)
.build();
long size = getWallQueueSize(backend, request);
if (size > 0) {
LOG.info("backend id:" + backend.getId() + ",all wal size:" + size);
}
return size;
}
public long getWallQueueSize(Backend backend, PGetWalQueueSizeRequest request) {
PGetWalQueueSizeResponse response = null;
long expireTime = System.currentTimeMillis() + Config.check_wal_queue_timeout_threshold;
long size = 0;
while (System.currentTimeMillis() <= expireTime) {
if (!backend.isAlive()) {
try {
Thread.sleep(100);
} catch (InterruptedException ie) {
LOG.info("group commit manager sleep wait InterruptedException: ", ie);
}
continue;
}
try {
Future<PGetWalQueueSizeResponse> future = BackendServiceProxy.getInstance()
.getWalQueueSize(new TNetworkAddress(backend.getHost(), backend.getBrpcPort()), request);
response = future.get();
} catch (Exception e) {
LOG.warn("encounter exception while getting wal queue size on backend id: " + backend.getId()
+ ",exception:" + e);
String msg = e.getMessage();
if (msg.contains("Method") && msg.contains("unimplemented")) {
break;
}
try {
Thread.sleep(100);
} catch (InterruptedException ie) {
LOG.info("group commit manager sleep wait InterruptedException: ", ie);
}
continue;
}
TStatusCode code = TStatusCode.findByValue(response.getStatus().getStatusCode());
if (code != TStatusCode.OK) {
String msg = "get all queue size fail,backend id: " + backend.getId() + ", status: "
+ response.getStatus();
LOG.warn(msg);
try {
Thread.sleep(100);
} catch (InterruptedException ie) {
LOG.info("group commit manager sleep wait InterruptedException: ", ie);
}
continue;
}
size = response.getSize();
break;
}
return size;
}
} |
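A minimal sketch of the exponential backoff the reviewer suggests (not from the Doris codebase; constant names are hypothetical and would realistically come from `Config`): the delay between RPC retries grows per attempt up to a cap instead of a fixed `Thread.sleep(100)`, with jitter so concurrent callers do not retry in lockstep.

```java
import java.util.concurrent.ThreadLocalRandom;

final class RetryBackoffSketch {

    // Hypothetical defaults; a real implementation would read these from Config.
    private static final long INITIAL_DELAY_MS = 100;
    private static final long MAX_DELAY_MS = 5_000;

    /** Sleeps for an exponentially growing, jittered delay based on the 0-based retry attempt. */
    static void backoff(int attempt) throws InterruptedException {
        int shift = Math.min(Math.max(attempt, 0), 16);      // clamp to avoid shift overflow
        long delay = Math.min(MAX_DELAY_MS, INITIAL_DELAY_MS << shift);
        // Full jitter: sleep a random amount up to the computed delay.
        Thread.sleep(ThreadLocalRandom.current().nextLong(delay + 1));
    }
}
```

A retry loop such as the one in `getWallQueueSize` would then track the attempt count and call `backoff(attempt)` where it currently sleeps a fixed 100 ms.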
Yes, that's intentional. The CompositeReadableBuffer has an optimized path when it wraps only one byte[] array. When sending such an inner byte[], the array is used directly on the fast path [here](https://github.com/apache/qpid-proton-j/blob/8c1f2326d46b9a67ae14bc3431acc6cddfbb7524/proton-j/src/main/java/org/apache/qpid/proton/engine/impl/DeliveryImpl.java#L379) (with an allocation equivalent to the old approach [here](https://github.com/apache/qpid-proton-j/blob/8c1f2326d46b9a67ae14bc3431acc6cddfbb7524/proton-j/src/main/java/org/apache/qpid/proton/engine/impl/DeliveryImpl.java#L342)). | public Mono<DeliveryState> send(byte[] bytes, int arrayOffset, int messageFormat, DeliveryState deliveryState) {
final CompositeReadableBuffer buffer = new CompositeReadableBuffer();
buffer.append(bytes).limit(arrayOffset);
return send(buffer, messageFormat, deliveryState);
} | final CompositeReadableBuffer buffer = new CompositeReadableBuffer(); | public Mono<DeliveryState> send(byte[] bytes, int arrayOffset, int messageFormat, DeliveryState deliveryState) {
return onEndpointActive().then(Mono.create(sink -> {
sendWork(new RetriableWorkItem(bytes, arrayOffset, messageFormat, sink, retryOptions.getTryTimeout(),
deliveryState, metricsProvider));
}));
} | class ReactorSender implements AmqpSendLink, AsyncCloseable, AutoCloseable {
private static final String DELIVERY_TAG_KEY = "deliveryTag";
private static final String PENDING_SENDS_SIZE_KEY = "pending_sends_size";
private final String entityPath;
private final Sender sender;
private final SendLinkHandler handler;
private final ReactorProvider reactorProvider;
private final Disposable.Composite subscriptions;
private final AtomicBoolean hasConnected = new AtomicBoolean();
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final AtomicInteger retryAttempts = new AtomicInteger();
private final Sinks.Empty<Void> isClosedMono = Sinks.empty();
private final Object pendingSendLock = new Object();
private final ConcurrentHashMap<String, RetriableWorkItem> pendingSendsMap = new ConcurrentHashMap<>();
private final PriorityQueue<WeightedDeliveryTag> pendingSendsQueue =
new PriorityQueue<>(1000, new DeliveryTagComparator());
private final ClientLogger logger;
private final Flux<AmqpEndpointState> endpointStates;
private final TokenManager tokenManager;
private final MessageSerializer messageSerializer;
private final AmqpRetryPolicy retry;
private final AmqpRetryOptions retryOptions;
private final String activeTimeoutMessage;
private final Scheduler scheduler;
private final AmqpMetricsProvider metricsProvider;
private final Object errorConditionLock = new Object();
private volatile Exception lastKnownLinkError;
private volatile Instant lastKnownErrorReportedAt;
private volatile int linkSize;
/**
* Creates an instance of {@link ReactorSender}.
*
* @param amqpConnection The parent {@link AmqpConnection} that this sender lives in.
* @param entityPath The message broker address for the sender.
* @param sender The underlying proton-j sender.
* @param handler The proton-j handler associated with the sender.
* @param reactorProvider Provider to schedule work on the proton-j reactor.
* @param tokenManager Token manager for authorising with the CBS node. Can be {@code null} if it is part of the
* transaction manager.
* @param messageSerializer Serializer to deserialise and serialize AMQP messages.
* @param retryOptions Retry options.
* @param scheduler Scheduler to schedule send timeout.
*/
ReactorSender(AmqpConnection amqpConnection, String entityPath, Sender sender, SendLinkHandler handler,
ReactorProvider reactorProvider, TokenManager tokenManager, MessageSerializer messageSerializer,
AmqpRetryOptions retryOptions, Scheduler scheduler, AmqpMetricsProvider metricsProvider) {
this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
this.sender = Objects.requireNonNull(sender, "'sender' cannot be null.");
this.handler = Objects.requireNonNull(handler, "'handler' cannot be null.");
this.reactorProvider = Objects.requireNonNull(reactorProvider, "'reactorProvider' cannot be null.");
this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null.");
this.scheduler = Objects.requireNonNull(scheduler, "'scheduler' cannot be null.");
this.retry = RetryUtil.getRetryPolicy(retryOptions);
this.tokenManager = tokenManager;
this.metricsProvider = metricsProvider;
String connectionId = handler.getConnectionId() == null ? NOT_APPLICABLE : handler.getConnectionId();
String linkName = getLinkName() == null ? NOT_APPLICABLE : getLinkName();
Map<String, Object> loggingContext = createContextWithConnectionId(connectionId);
loggingContext.put(LINK_NAME_KEY, linkName);
loggingContext.put(ENTITY_PATH_KEY, entityPath);
this.logger = new ClientLogger(ReactorSender.class, loggingContext);
this.activeTimeoutMessage = String.format(
"ReactorSender connectionId[%s] linkName[%s]: Waiting for send and receive handler to be ACTIVE",
handler.getConnectionId(), handler.getLinkName());
this.endpointStates = this.handler.getEndpointStates()
.map(state -> {
logger.verbose("State {}", state);
this.hasConnected.set(state == EndpointState.ACTIVE);
return AmqpEndpointStateUtil.getConnectionState(state);
})
.doOnError(error -> {
hasConnected.set(false);
handleError(error);
})
.doOnComplete(() -> {
hasConnected.set(false);
handleClose();
})
.cache(1);
this.subscriptions = Disposables.composite(
this.endpointStates.subscribe(),
this.handler.getDeliveredMessages().subscribe(this::processDeliveredMessage),
this.handler.getLinkCredits().subscribe(credit -> {
logger.atVerbose().addKeyValue("credits", credit)
.log("Credits on link.");
this.scheduleWorkOnDispatcher();
}),
amqpConnection.getShutdownSignals().flatMap(signal -> {
logger.verbose("Shutdown signal received.");
hasConnected.set(false);
return closeAsync("Connection shutdown.", null);
}).subscribe()
);
if (tokenManager != null) {
this.subscriptions.add(tokenManager.getAuthorizationResults().onErrorResume(error -> {
final Mono<Void> operation =
closeAsync(String.format("connectionId[%s] linkName[%s] Token renewal failure. Disposing send "
+ "link.", amqpConnection.getId(), getLinkName()),
new ErrorCondition(Symbol.getSymbol(NOT_ALLOWED.getErrorCondition()),
error.getMessage()));
return operation.then(Mono.empty());
}).subscribe(response -> {
logger.atVerbose().addKeyValue("response", response)
.log("Token refreshed.");
}, error -> {
}, () -> {
logger.verbose(" Authorization completed. Disposing.");
closeAsync("Authorization completed. Disposing.", null).subscribe();
}));
}
}
@Override
public Flux<AmqpEndpointState> getEndpointStates() {
return endpointStates;
}
@Override
public Mono<Void> send(Message message) {
return send(message, null);
}
@Override
public Mono<Void> send(Message message, DeliveryState deliveryState) {
if (isDisposed.get()) {
return Mono.error(new IllegalStateException(String.format(
"connectionId[%s] linkName[%s] Cannot publish message when disposed.", handler.getConnectionId(),
getLinkName())));
}
return getLinkSize()
.flatMap(maxMessageSize -> {
final int payloadSize = messageSerializer.getSize(message);
final int allocationSize =
Math.min(payloadSize + MAX_AMQP_HEADER_SIZE_BYTES, maxMessageSize);
final byte[] bytes = new byte[allocationSize];
int encodedSize;
try {
encodedSize = message.encode(bytes, 0, allocationSize);
} catch (BufferOverflowException exception) {
final String errorMessage =
String.format(Locale.US,
"Error sending. Size of the payload exceeded maximum message size: %s kb",
maxMessageSize / 1024);
final Throwable error = new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED,
errorMessage, exception, handler.getErrorContext(sender));
return Mono.error(error);
}
return send(bytes, encodedSize, DeliveryImpl.DEFAULT_MESSAGE_FORMAT, deliveryState);
}).then();
}
@Override
public Mono<Void> send(List<Message> messageBatch) {
return send(messageBatch, null);
}
@Override
public Mono<Void> send(List<Message> messageBatch, DeliveryState deliveryState) {
if (isDisposed.get()) {
return Mono.error(new IllegalStateException(String.format(
"connectionId[%s] linkName[%s] Cannot publish data batch when disposed.", handler.getConnectionId(),
getLinkName())));
}
if (messageBatch.size() == 1) {
return send(messageBatch.get(0), deliveryState);
}
return getLinkSize()
.flatMap(maxMessageSize -> {
int totalEncodedSize = 0;
final CompositeReadableBuffer buffer = new CompositeReadableBuffer();
final byte[] envelopBytes = batchEnvelopBytes(messageBatch.get(0), maxMessageSize);
if (envelopBytes.length > 0) {
totalEncodedSize += envelopBytes.length;
if (totalEncodedSize > maxMessageSize) {
return batchBufferOverflowError(maxMessageSize);
}
buffer.append(envelopBytes);
}
for (final Message message : messageBatch) {
final byte[] sectionBytes = batchBinaryDataSectionBytes(message, maxMessageSize);
if (sectionBytes.length > 0) {
totalEncodedSize += sectionBytes.length;
if (totalEncodedSize > maxMessageSize) {
return batchBufferOverflowError(maxMessageSize);
}
buffer.append(sectionBytes);
}
}
return send(buffer, AmqpConstants.AMQP_BATCH_MESSAGE_FORMAT, deliveryState);
}).then();
}
private byte[] batchEnvelopBytes(Message envelopMessage, int maxMessageSize) {
final Message message = Proton.message();
message.setMessageAnnotations(envelopMessage.getMessageAnnotations());
final int size = messageSerializer.getSize(message);
final int allocationSize = Math.min(size + MAX_AMQP_HEADER_SIZE_BYTES, maxMessageSize);
final byte[] encodedBytes = new byte[allocationSize];
final int encodedSize = message.encode(encodedBytes, 0, allocationSize);
return Arrays.copyOf(encodedBytes, encodedSize);
}
private byte[] batchBinaryDataSectionBytes(Message sectionMessage, int maxMessageSize) {
final int size = messageSerializer.getSize(sectionMessage);
final int allocationSize = Math.min(size + MAX_AMQP_HEADER_SIZE_BYTES, maxMessageSize);
final byte[] encodedBytes = new byte[allocationSize];
final int encodedSize = sectionMessage.encode(encodedBytes, 0, allocationSize);
final Message message = Proton.message();
final Data binaryData = new Data(new Binary(encodedBytes, 0, encodedSize));
message.setBody(binaryData);
final int binaryRawSize = binaryData.getValue().getLength();
final int binaryEncodedSize = binaryEncodedSize(binaryRawSize);
final byte[] binaryEncodedBytes = new byte[binaryEncodedSize];
message.encode(binaryEncodedBytes, 0, binaryEncodedSize);
return binaryEncodedBytes;
}
private Mono<Void> batchBufferOverflowError(int maxMessageSize) {
return Mono.error(new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED,
String.format(Locale.US, "Size of the payload exceeded maximum message size: %s kb", maxMessageSize / 1024),
new BufferOverflowException(), handler.getErrorContext(sender)));
}
/**
* Compute the encoded size when encoding a binary data of given size per Amqp 1.0 spec "amqp:data:binary" format.
*
* @param binaryRawSize the length of the binary data.
* @return the encoded size.
*/
private int binaryEncodedSize(int binaryRawSize) {
if (binaryRawSize <= 255) {
return 5 + binaryRawSize;
} else {
return 8 + binaryRawSize;
}
}
@Override
public AmqpErrorContext getErrorContext() {
return handler.getErrorContext(sender);
}
@Override
public String getLinkName() {
return sender.getName();
}
@Override
public String getEntityPath() {
return entityPath;
}
@Override
public String getHostname() {
return handler.getHostname();
}
@Override
public Mono<Integer> getLinkSize() {
if (linkSize > 0) {
return Mono.defer(() -> Mono.just(this.linkSize));
}
synchronized (this) {
if (linkSize > 0) {
return Mono.defer(() -> Mono.just(linkSize));
}
return RetryUtil.withRetry(getEndpointStates().takeUntil(state -> state == AmqpEndpointState.ACTIVE),
retryOptions, activeTimeoutMessage)
.then(Mono.fromCallable(() -> {
final UnsignedLong remoteMaxMessageSize = sender.getRemoteMaxMessageSize();
if (remoteMaxMessageSize != null) {
linkSize = remoteMaxMessageSize.intValue();
} else {
logger.warning("Could not get the getRemoteMaxMessageSize. Returning current link size: {}", linkSize);
}
return linkSize;
}));
}
}
@Override
public boolean isDisposed() {
return isDisposed.get();
}
/**
* Blocking call that disposes of the sender.
*
* @see
*/
@Override
public void dispose() {
close();
}
/**
* Blocking call that disposes of the sender.
*
* @see
*/
@Override
public void close() {
closeAsync().block(retryOptions.getTryTimeout());
}
@Override
public Mono<Void> closeAsync() {
return closeAsync("User invoked close operation.", null);
}
/**
* Disposes of the sender.
*
* @param errorCondition Error condition associated with close operation.
* @param message Message associated with why the sender was closed.
*
* @return A mono that completes when the send link has closed.
*/
Mono<Void> closeAsync(String message, ErrorCondition errorCondition) {
if (isDisposed.getAndSet(true)) {
return isClosedMono.asMono();
}
addErrorCondition(logger.atVerbose(), errorCondition)
.log("Setting error condition and disposing. {}", message);
final Runnable closeWork = () -> {
if (errorCondition != null && sender.getCondition() == null) {
sender.setCondition(errorCondition);
}
sender.close();
};
return Mono.fromRunnable(() -> {
try {
reactorProvider.getReactorDispatcher().invoke(closeWork);
} catch (IOException e) {
logger.warning("Could not schedule close work. Running manually. And completing close.", e);
closeWork.run();
handleClose();
} catch (RejectedExecutionException e) {
logger.info("RejectedExecutionException scheduling close work. And completing close.");
closeWork.run();
handleClose();
}
}).then(isClosedMono.asMono())
.publishOn(Schedulers.boundedElastic());
}
/**
* A mono that completes when the sender has completely closed.
*
* @return mono that completes when the sender has completely closed.
*/
Mono<Void> isClosed() {
return isClosedMono.asMono();
}
@Override
Mono<DeliveryState> send(ReadableBuffer buffer, int messageFormat, DeliveryState deliveryState) {
final Flux<EndpointState> activeEndpointFlux = RetryUtil.withRetry(
handler.getEndpointStates().takeUntil(state -> state == EndpointState.ACTIVE), retryOptions,
activeTimeoutMessage);
return activeEndpointFlux.then(Mono.create(sink -> {
sendWork(new RetriableWorkItem(buffer, messageFormat, sink, retryOptions.getTryTimeout(), deliveryState, metricsProvider));
}));
}
/**
* Add the work item in pending send to be processed on {@link ReactorDispatcher} thread.
*
* @param workItem to be processed.
*/
private void sendWork(RetriableWorkItem workItem) {
final String deliveryTag = UUID.randomUUID().toString().replace("-", "");
synchronized (pendingSendLock) {
this.pendingSendsMap.put(deliveryTag, workItem);
this.pendingSendsQueue.offer(new WeightedDeliveryTag(deliveryTag, workItem.hasBeenRetried() ? 1 : 0));
}
this.scheduleWorkOnDispatcher();
}
/**
* Invokes work on the Reactor. Should only be called from ReactorDispatcher.invoke()
*/
private void processSendWork() {
if (!hasConnected.get()) {
logger.warning("Not connected. Not processing send work.");
return;
}
if (isDisposed.get()) {
logger.info("Sender is closed. Not executing work.");
return;
}
while (hasConnected.get() && sender.getCredit() > 0) {
final WeightedDeliveryTag weightedDelivery;
final RetriableWorkItem workItem;
final String deliveryTag;
synchronized (pendingSendLock) {
weightedDelivery = this.pendingSendsQueue.poll();
if (weightedDelivery != null) {
deliveryTag = weightedDelivery.getDeliveryTag();
workItem = this.pendingSendsMap.get(deliveryTag);
} else {
workItem = null;
deliveryTag = null;
}
}
if (workItem == null) {
if (deliveryTag != null) {
logger.atVerbose()
.addKeyValue(DELIVERY_TAG_KEY, deliveryTag)
.log("sendData not found for this delivery.");
}
break;
}
Delivery delivery = null;
boolean linkAdvance = false;
int sentMsgSize = 0;
Exception sendException = null;
try {
workItem.beforeTry();
delivery = sender.delivery(deliveryTag.getBytes(UTF_8));
delivery.setMessageFormat(workItem.getMessageFormat());
if (workItem.isDeliveryStateProvided()) {
delivery.disposition(workItem.getDeliveryState());
}
final ReadableBuffer encodedBuffer = workItem.getEncodedBuffer();
encodedBuffer.rewind();
sentMsgSize = sender.send(encodedBuffer);
assert sentMsgSize == workItem.getEncodedMessageSize()
: "Contract of the ProtonJ library for Sender. Send API changed";
linkAdvance = sender.advance();
} catch (Exception exception) {
sendException = exception;
}
if (linkAdvance) {
logger.atVerbose()
.addKeyValue(DELIVERY_TAG_KEY, deliveryTag)
.log("Sent message.");
workItem.setWaitingForAck();
scheduler.schedule(new SendTimeout(deliveryTag), retryOptions.getTryTimeout().toMillis(),
TimeUnit.MILLISECONDS);
} else {
logger.atVerbose()
.addKeyValue(DELIVERY_TAG_KEY, deliveryTag)
.addKeyValue("sentMessageSize", sentMsgSize)
.addKeyValue("payloadActualSize", workItem.getEncodedMessageSize())
.log("Sendlink advance failed.");
DeliveryState outcome = null;
if (delivery != null) {
outcome = delivery.getRemoteState();
delivery.free();
}
final AmqpErrorContext context = handler.getErrorContext(sender);
final Throwable exception = sendException != null
? new OperationCancelledException(String.format(Locale.US,
"Entity(%s): send operation failed. Please see cause for more details", entityPath),
sendException, context)
: new OperationCancelledException(String.format(Locale.US,
"Entity(%s): send operation failed while advancing delivery(tag: %s).",
entityPath, deliveryTag), context);
workItem.error(exception, outcome);
}
}
}
private void processDeliveredMessage(Delivery delivery) {
final DeliveryState outcome = delivery.getRemoteState();
final String deliveryTag = new String(delivery.getTag(), UTF_8);
logger.atVerbose()
.addKeyValue(DELIVERY_TAG_KEY, deliveryTag)
.log("Process delivered message.");
final RetriableWorkItem workItem = pendingSendsMap.remove(deliveryTag);
if (workItem == null) {
logger.atVerbose()
.addKeyValue(DELIVERY_TAG_KEY, deliveryTag)
.log("Mismatch (or send timed out).");
return;
} else if (workItem.isDeliveryStateProvided()) {
workItem.success(outcome);
return;
}
if (outcome instanceof Accepted) {
synchronized (errorConditionLock) {
lastKnownLinkError = null;
lastKnownErrorReportedAt = null;
retryAttempts.set(0);
}
workItem.success(outcome);
} else if (outcome instanceof Rejected) {
final Rejected rejected = (Rejected) outcome;
final org.apache.qpid.proton.amqp.transport.ErrorCondition error = rejected.getError();
final Exception exception = ExceptionUtil.toException(error.getCondition().toString(),
error.getDescription(), handler.getErrorContext(sender));
logger.atWarning()
.addKeyValue(DELIVERY_TAG_KEY, deliveryTag)
.addKeyValue("rejected", rejected)
.log("Delivery rejected.");
final int retryAttempt;
if (isGeneralSendError(error.getCondition())) {
synchronized (errorConditionLock) {
lastKnownLinkError = exception;
lastKnownErrorReportedAt = Instant.now();
retryAttempt = retryAttempts.incrementAndGet();
}
} else {
retryAttempt = retryAttempts.get();
}
final Duration retryInterval = retry.calculateRetryDelay(exception, retryAttempt);
if (retryInterval == null || retryInterval.compareTo(workItem.getTimeoutTracker().remaining()) > 0) {
cleanupFailedSend(workItem, exception, outcome);
} else {
workItem.setLastKnownException(exception);
try {
reactorProvider.getReactorDispatcher().invoke(() -> sendWork(workItem), retryInterval);
} catch (IOException | RejectedExecutionException schedulerException) {
exception.initCause(schedulerException);
cleanupFailedSend(
workItem,
new AmqpException(false,
String.format(Locale.US, "Entity(%s): send operation failed while scheduling a"
+ " retry on Reactor, see cause for more details.", entityPath),
schedulerException, handler.getErrorContext(sender)),
outcome);
}
}
} else if (outcome instanceof Released) {
cleanupFailedSend(workItem, new OperationCancelledException(outcome.toString(),
handler.getErrorContext(sender)), outcome);
} else if (outcome instanceof Declared) {
final Declared declared = (Declared) outcome;
workItem.success(declared);
} else {
cleanupFailedSend(workItem, new AmqpException(false, outcome.toString(),
handler.getErrorContext(sender)), outcome);
}
}
private void scheduleWorkOnDispatcher() {
try {
reactorProvider.getReactorDispatcher().invoke(this::processSendWork);
} catch (IOException e) {
logger.warning("Error scheduling work on reactor.", e);
} catch (RejectedExecutionException e) {
logger.info("Error scheduling work on reactor because of RejectedExecutionException.");
}
}
private void cleanupFailedSend(final RetriableWorkItem workItem, final Exception exception, final DeliveryState deliveryState) {
workItem.error(exception, deliveryState);
}
private void completeClose() {
isClosedMono.emitEmpty((signalType, result) -> {
addSignalTypeAndResult(logger.atWarning(), signalType, result).log("Unable to emit shutdown signal.");
return false;
});
subscriptions.dispose();
if (tokenManager != null) {
tokenManager.close();
}
}
/**
* Clears pending sends and puts an error in there.
*
* @param error Error to pass to pending sends.
*/
private void handleError(Throwable error) {
synchronized (pendingSendLock) {
if (isDisposed.getAndSet(true)) {
logger.verbose("This was already disposed. Dropping error.");
} else {
logger.atVerbose()
.addKeyValue(PENDING_SENDS_SIZE_KEY, () -> String.valueOf(pendingSendsMap.size()))
.log("Disposing pending sends with error.");
}
pendingSendsMap.forEach((key, value) -> value.error(error, null));
pendingSendsMap.clear();
pendingSendsQueue.clear();
}
completeClose();
}
private void handleClose() {
final String message = String.format("Could not complete sends because link '%s' for '%s' is closed.",
getLinkName(), entityPath);
final AmqpErrorContext context = handler.getErrorContext(sender);
synchronized (pendingSendLock) {
if (isDisposed.getAndSet(true)) {
logger.verbose("This was already disposed.");
} else {
logger.atVerbose()
.addKeyValue(PENDING_SENDS_SIZE_KEY, () -> String.valueOf(pendingSendsMap.size()))
.log("Disposing pending sends.");
}
pendingSendsMap.forEach((key, value) -> value.error(new AmqpException(true, message, context), null));
pendingSendsMap.clear();
pendingSendsQueue.clear();
}
completeClose();
}
private static boolean isGeneralSendError(Symbol amqpError) {
return (amqpError == AmqpErrorCode.SERVER_BUSY_ERROR || amqpError == AmqpErrorCode.TIMEOUT_ERROR
|| amqpError == AmqpErrorCode.RESOURCE_LIMIT_EXCEEDED);
}
private static class WeightedDeliveryTag {
private final String deliveryTag;
private final int priority;
WeightedDeliveryTag(final String deliveryTag, final int priority) {
this.deliveryTag = deliveryTag;
this.priority = priority;
}
private String getDeliveryTag() {
return this.deliveryTag;
}
private int getPriority() {
return this.priority;
}
}
private static class DeliveryTagComparator implements Comparator<WeightedDeliveryTag>, Serializable {
private static final long serialVersionUID = -7057500582037295635L;
@Override
public int compare(WeightedDeliveryTag deliveryTag0, WeightedDeliveryTag deliveryTag1) {
return deliveryTag1.getPriority() - deliveryTag0.getPriority();
}
}
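    // Illustrative sketch (not part of the original source): java.util.PriorityQueue is a min-heap,
    // so the reversed comparator above makes the entry with the higher priority value compare as
    // "smaller" and be polled first. That is how retried work items (offered with priority 1 in
    // sendWork) jump ahead of first attempts (priority 0) in pendingSendsQueue.
    @SuppressWarnings("unused")
    private static void deliveryTagOrderingSketch() {
        final PriorityQueue<WeightedDeliveryTag> queue =
            new PriorityQueue<>(2, new DeliveryTagComparator());
        queue.offer(new WeightedDeliveryTag("first-attempt", 0));
        queue.offer(new WeightedDeliveryTag("retried", 1));
        // The retried delivery is dequeued before the first attempt.
        assert "retried".equals(queue.poll().getDeliveryTag());
    }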
/**
* Keeps track of messages that have been sent, but may not have been acknowledged by the service.
*/
private class SendTimeout implements Runnable {
private final String deliveryTag;
SendTimeout(String deliveryTag) {
this.deliveryTag = deliveryTag;
}
@Override
public void run() {
final RetriableWorkItem workItem = pendingSendsMap.remove(deliveryTag);
if (workItem == null) {
return;
}
Exception cause = lastKnownLinkError;
final Exception lastError;
final Instant lastErrorTime;
synchronized (errorConditionLock) {
lastError = lastKnownLinkError;
lastErrorTime = lastKnownErrorReportedAt;
}
if (lastError != null && lastErrorTime != null) {
final Instant now = Instant.now();
final boolean isLastErrorAfterSleepTime =
lastErrorTime.isAfter(now.minusSeconds(SERVER_BUSY_BASE_SLEEP_TIME_IN_SECS));
final boolean isServerBusy = lastError instanceof AmqpException && isLastErrorAfterSleepTime;
final boolean isLastErrorAfterOperationTimeout =
lastErrorTime.isAfter(now.minus(retryOptions.getTryTimeout()));
cause = isServerBusy || isLastErrorAfterOperationTimeout
? lastError
: null;
}
final AmqpException exception;
if (cause instanceof AmqpException) {
exception = (AmqpException) cause;
} else {
exception = new AmqpException(true, AmqpErrorCondition.TIMEOUT_ERROR,
String.format(Locale.US, "Entity(%s): Send operation timed out", entityPath),
handler.getErrorContext(sender));
}
workItem.error(exception, null);
}
}
} | class ReactorSender implements AmqpSendLink, AsyncCloseable, AutoCloseable {
private static final String DELIVERY_TAG_KEY = "deliveryTag";
private static final String PENDING_SENDS_SIZE_KEY = "pending_sends_size";
private final String entityPath;
private final Sender sender;
private final SendLinkHandler handler;
private final ReactorProvider reactorProvider;
private final Disposable.Composite subscriptions;
private final AtomicBoolean hasConnected = new AtomicBoolean();
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final AtomicInteger retryAttempts = new AtomicInteger();
private final Sinks.Empty<Void> isClosedMono = Sinks.empty();
private final Object pendingSendLock = new Object();
private final ConcurrentHashMap<String, RetriableWorkItem> pendingSendsMap = new ConcurrentHashMap<>();
private final PriorityQueue<WeightedDeliveryTag> pendingSendsQueue =
new PriorityQueue<>(1000, new DeliveryTagComparator());
private final ClientLogger logger;
private final Flux<AmqpEndpointState> endpointStates;
private final TokenManager tokenManager;
private final MessageSerializer messageSerializer;
private final AmqpRetryPolicy retry;
private final AmqpRetryOptions retryOptions;
private final String activeTimeoutMessage;
private final Scheduler scheduler;
private final AmqpMetricsProvider metricsProvider;
private final Object errorConditionLock = new Object();
private volatile Exception lastKnownLinkError;
private volatile Instant lastKnownErrorReportedAt;
private volatile int linkSize;
/**
* Creates an instance of {@link ReactorSender}.
*
* @param amqpConnection The parent {@link AmqpConnection} that this sender lives in.
* @param entityPath The message broker address for the sender.
* @param sender The underlying proton-j sender.
* @param handler The proton-j handler associated with the sender.
* @param reactorProvider Provider to schedule work on the proton-j reactor.
* @param tokenManager Token manager for authorising with the CBS node. Can be {@code null} if it is part of the
* transaction manager.
* @param messageSerializer Serializer to deserialise and serialize AMQP messages.
* @param retryOptions Retry options.
* @param scheduler Scheduler to schedule send timeout.
*/
ReactorSender(AmqpConnection amqpConnection, String entityPath, Sender sender, SendLinkHandler handler,
ReactorProvider reactorProvider, TokenManager tokenManager, MessageSerializer messageSerializer,
AmqpRetryOptions retryOptions, Scheduler scheduler, AmqpMetricsProvider metricsProvider) {
this.entityPath = Objects.requireNonNull(entityPath, "'entityPath' cannot be null.");
this.sender = Objects.requireNonNull(sender, "'sender' cannot be null.");
this.handler = Objects.requireNonNull(handler, "'handler' cannot be null.");
this.reactorProvider = Objects.requireNonNull(reactorProvider, "'reactorProvider' cannot be null.");
this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null.");
this.scheduler = Objects.requireNonNull(scheduler, "'scheduler' cannot be null.");
this.retry = RetryUtil.getRetryPolicy(retryOptions);
this.tokenManager = tokenManager;
this.metricsProvider = metricsProvider;
String connectionId = handler.getConnectionId() == null ? NOT_APPLICABLE : handler.getConnectionId();
String linkName = getLinkName() == null ? NOT_APPLICABLE : getLinkName();
Map<String, Object> loggingContext = createContextWithConnectionId(connectionId);
loggingContext.put(LINK_NAME_KEY, linkName);
loggingContext.put(ENTITY_PATH_KEY, entityPath);
this.logger = new ClientLogger(ReactorSender.class, loggingContext);
this.activeTimeoutMessage = String.format(
"ReactorSender connectionId[%s] linkName[%s]: Waiting for send and receive handler to be ACTIVE",
handler.getConnectionId(), handler.getLinkName());
this.endpointStates = this.handler.getEndpointStates()
.map(state -> {
logger.verbose("State {}", state);
this.hasConnected.set(state == EndpointState.ACTIVE);
return AmqpEndpointStateUtil.getConnectionState(state);
})
.doOnError(error -> {
hasConnected.set(false);
handleError(error);
})
.doOnComplete(() -> {
hasConnected.set(false);
handleClose();
})
.cache(1);
this.subscriptions = Disposables.composite(
this.endpointStates.subscribe(),
this.handler.getDeliveredMessages().subscribe(this::processDeliveredMessage),
this.handler.getLinkCredits().subscribe(credit -> {
logger.atVerbose().addKeyValue("credits", credit)
.log("Credits on link.");
this.scheduleWorkOnDispatcher();
}),
amqpConnection.getShutdownSignals().flatMap(signal -> {
logger.verbose("Shutdown signal received.");
hasConnected.set(false);
return closeAsync("Connection shutdown.", null);
}).subscribe()
);
if (tokenManager != null) {
this.subscriptions.add(tokenManager.getAuthorizationResults().onErrorResume(error -> {
final Mono<Void> operation =
closeAsync(String.format("connectionId[%s] linkName[%s] Token renewal failure. Disposing send "
+ "link.", amqpConnection.getId(), getLinkName()),
new ErrorCondition(Symbol.getSymbol(NOT_ALLOWED.getErrorCondition()),
error.getMessage()));
return operation.then(Mono.empty());
}).subscribe(response -> {
logger.atVerbose().addKeyValue("response", response)
.log("Token refreshed.");
}, error -> {
}, () -> {
logger.verbose(" Authorization completed. Disposing.");
closeAsync("Authorization completed. Disposing.", null).subscribe();
}));
}
}
@Override
public Flux<AmqpEndpointState> getEndpointStates() {
return endpointStates;
}
@Override
public Mono<Void> send(Message message) {
return send(message, null);
}
@Override
public Mono<Void> send(Message message, DeliveryState deliveryState) {
if (isDisposed.get()) {
return Mono.error(new IllegalStateException(String.format(
"connectionId[%s] linkName[%s] Cannot publish message when disposed.", handler.getConnectionId(),
getLinkName())));
}
return getLinkSize()
.flatMap(maxMessageSize -> {
final int payloadSize = messageSerializer.getSize(message);
final int allocationSize =
Math.min(payloadSize + MAX_AMQP_HEADER_SIZE_BYTES, maxMessageSize);
final byte[] bytes = new byte[allocationSize];
int encodedSize;
try {
encodedSize = message.encode(bytes, 0, allocationSize);
} catch (BufferOverflowException exception) {
final String errorMessage =
String.format(Locale.US,
"Error sending. Size of the payload exceeded maximum message size: %s kb",
maxMessageSize / 1024);
final Throwable error = new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED,
errorMessage, exception, handler.getErrorContext(sender));
return Mono.error(error);
}
return send(bytes, encodedSize, DeliveryImpl.DEFAULT_MESSAGE_FORMAT, deliveryState);
}).then();
}
@Override
public Mono<Void> send(List<Message> messageBatch) {
return send(messageBatch, null);
}
@Override
public Mono<Void> send(List<Message> messageBatch, DeliveryState deliveryState) {
if (isDisposed.get()) {
return Mono.error(new IllegalStateException(String.format(
"connectionId[%s] linkName[%s] Cannot publish data batch when disposed.", handler.getConnectionId(),
getLinkName())));
}
if (messageBatch.size() == 1) {
return send(messageBatch.get(0), deliveryState);
}
return getLinkSize()
.flatMap(maxMessageSize -> {
int totalEncodedSize = 0;
final CompositeReadableBuffer buffer = new CompositeReadableBuffer();
final byte[] envelopBytes = batchEnvelopBytes(messageBatch.get(0), maxMessageSize);
if (envelopBytes.length > 0) {
totalEncodedSize += envelopBytes.length;
if (totalEncodedSize > maxMessageSize) {
return batchBufferOverflowError(maxMessageSize);
}
buffer.append(envelopBytes);
}
for (final Message message : messageBatch) {
final byte[] sectionBytes = batchBinaryDataSectionBytes(message, maxMessageSize);
if (sectionBytes.length > 0) {
totalEncodedSize += sectionBytes.length;
if (totalEncodedSize > maxMessageSize) {
return batchBufferOverflowError(maxMessageSize);
}
buffer.append(sectionBytes);
} else {
logger.info("Ignoring the empty message org.apache.qpid.proton.message.message@{} in the batch.",
Integer.toHexString(System.identityHashCode(message)));
}
}
return send(buffer, AmqpConstants.AMQP_BATCH_MESSAGE_FORMAT, deliveryState);
}).then();
}
private byte[] batchEnvelopBytes(Message envelopMessage, int maxMessageSize) {
final Message message = Proton.message();
message.setMessageAnnotations(envelopMessage.getMessageAnnotations());
if ((envelopMessage.getMessageId() instanceof String)
&& !CoreUtils.isNullOrEmpty((String) envelopMessage.getMessageId())) {
message.setMessageId(envelopMessage.getMessageId());
}
if (!CoreUtils.isNullOrEmpty(envelopMessage.getGroupId())) {
message.setGroupId(envelopMessage.getGroupId());
}
final int size = messageSerializer.getSize(message);
final int allocationSize = Math.min(size + MAX_AMQP_HEADER_SIZE_BYTES, maxMessageSize);
final byte[] encodedBytes = new byte[allocationSize];
final int encodedSize = message.encode(encodedBytes, 0, allocationSize);
return Arrays.copyOf(encodedBytes, encodedSize);
}
private byte[] batchBinaryDataSectionBytes(Message sectionMessage, int maxMessageSize) {
final int size = messageSerializer.getSize(sectionMessage);
final int allocationSize = Math.min(size + MAX_AMQP_HEADER_SIZE_BYTES, maxMessageSize);
final byte[] encodedBytes = new byte[allocationSize];
final int encodedSize = sectionMessage.encode(encodedBytes, 0, allocationSize);
final Message message = Proton.message();
final Data binaryData = new Data(new Binary(encodedBytes, 0, encodedSize));
message.setBody(binaryData);
final int binaryRawSize = binaryData.getValue().getLength();
final int binaryEncodedSize = binaryEncodedSize(binaryRawSize);
final byte[] binaryEncodedBytes = new byte[binaryEncodedSize];
message.encode(binaryEncodedBytes, 0, binaryEncodedSize);
return binaryEncodedBytes;
}
private Mono<Void> batchBufferOverflowError(int maxMessageSize) {
return FluxUtil.monoError(logger, new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED,
String.format(Locale.US, "Size of the payload exceeded maximum message size: %s kb", maxMessageSize / 1024),
new BufferOverflowException(), handler.getErrorContext(sender)));
}
/**
* Compute the encoded size when encoding a binary data of given size per Amqp 1.0 spec "amqp:data:binary" format.
*
* @param binaryRawSize the length of the binary data.
* @return the encoded size.
*/
private int binaryEncodedSize(int binaryRawSize) {
if (binaryRawSize <= 255) {
return 5 + binaryRawSize;
} else {
return 8 + binaryRawSize;
}
}
@Override
public AmqpErrorContext getErrorContext() {
return handler.getErrorContext(sender);
}
@Override
public String getLinkName() {
return sender.getName();
}
@Override
public String getEntityPath() {
return entityPath;
}
@Override
public String getHostname() {
return handler.getHostname();
}
@Override
public Mono<Integer> getLinkSize() {
if (linkSize > 0) {
return Mono.defer(() -> Mono.just(this.linkSize));
}
synchronized (this) {
if (linkSize > 0) {
return Mono.defer(() -> Mono.just(linkSize));
}
return RetryUtil.withRetry(getEndpointStates().takeUntil(state -> state == AmqpEndpointState.ACTIVE),
retryOptions, activeTimeoutMessage)
.then(Mono.fromCallable(() -> {
final UnsignedLong remoteMaxMessageSize = sender.getRemoteMaxMessageSize();
if (remoteMaxMessageSize != null) {
linkSize = remoteMaxMessageSize.intValue();
} else {
logger.warning("Could not get the getRemoteMaxMessageSize. Returning current link size: {}", linkSize);
}
return linkSize;
}));
}
}
@Override
public boolean isDisposed() {
return isDisposed.get();
}
/**
* Blocking call that disposes of the sender.
*
* @see
*/
@Override
public void dispose() {
close();
}
/**
* Blocking call that disposes of the sender.
*
* @see
*/
@Override
public void close() {
closeAsync().block(retryOptions.getTryTimeout());
}
@Override
public Mono<Void> closeAsync() {
return closeAsync("User invoked close operation.", null);
}
/**
* Disposes of the sender.
*
* @param errorCondition Error condition associated with close operation.
* @param message Message associated with why the sender was closed.
*
* @return A mono that completes when the send link has closed.
*/
Mono<Void> closeAsync(String message, ErrorCondition errorCondition) {
if (isDisposed.getAndSet(true)) {
return isClosedMono.asMono();
}
addErrorCondition(logger.atVerbose(), errorCondition)
.log("Setting error condition and disposing. {}", message);
final Runnable closeWork = () -> {
if (errorCondition != null && sender.getCondition() == null) {
sender.setCondition(errorCondition);
}
sender.close();
};
return Mono.fromRunnable(() -> {
try {
reactorProvider.getReactorDispatcher().invoke(closeWork);
} catch (IOException e) {
logger.warning("Could not schedule close work. Running manually. And completing close.", e);
closeWork.run();
handleClose();
} catch (RejectedExecutionException e) {
logger.info("RejectedExecutionException scheduling close work. And completing close.");
closeWork.run();
handleClose();
}
}).then(isClosedMono.asMono())
.publishOn(Schedulers.boundedElastic());
}
/**
* A mono that completes when the sender has completely closed.
*
* @return mono that completes when the sender has completely closed.
*/
Mono<Void> isClosed() {
return isClosedMono.asMono();
}
@Override
Mono<DeliveryState> send(ReadableBuffer buffer, int messageFormat, DeliveryState deliveryState) {
return onEndpointActive().then(Mono.create(sink -> {
sendWork(new RetriableWorkItem(buffer, messageFormat, sink, retryOptions.getTryTimeout(),
deliveryState, metricsProvider));
}));
}
private Flux<EndpointState> onEndpointActive() {
return RetryUtil.withRetry(handler.getEndpointStates().takeUntil(state -> state == EndpointState.ACTIVE),
retryOptions, activeTimeoutMessage);
}
/**
* Add the work item in pending send to be processed on {@link ReactorDispatcher} thread.
*
* @param workItem to be processed.
*/
private void sendWork(RetriableWorkItem workItem) {
final String deliveryTag = UUID.randomUUID().toString().replace("-", "");
synchronized (pendingSendLock) {
this.pendingSendsMap.put(deliveryTag, workItem);
this.pendingSendsQueue.offer(new WeightedDeliveryTag(deliveryTag, workItem.hasBeenRetried() ? 1 : 0));
}
this.scheduleWorkOnDispatcher();
}
/**
* Invokes work on the Reactor. Should only be called from ReactorDispatcher.invoke()
*/
private void processSendWork() {
if (!hasConnected.get()) {
logger.warning("Not connected. Not processing send work.");
return;
}
if (isDisposed.get()) {
logger.info("Sender is closed. Not executing work.");
return;
}
while (hasConnected.get() && sender.getCredit() > 0) {
final WeightedDeliveryTag weightedDelivery;
final RetriableWorkItem workItem;
final String deliveryTag;
synchronized (pendingSendLock) {
weightedDelivery = this.pendingSendsQueue.poll();
if (weightedDelivery != null) {
deliveryTag = weightedDelivery.getDeliveryTag();
workItem = this.pendingSendsMap.get(deliveryTag);
} else {
workItem = null;
deliveryTag = null;
}
}
if (workItem == null) {
if (deliveryTag != null) {
logger.atVerbose()
.addKeyValue(DELIVERY_TAG_KEY, deliveryTag)
.log("sendData not found for this delivery.");
}
break;
}
Delivery delivery = null;
boolean linkAdvance = false;
int sentMsgSize = 0;
Exception sendException = null;
try {
workItem.beforeTry();
delivery = sender.delivery(deliveryTag.getBytes(UTF_8));
delivery.setMessageFormat(workItem.getMessageFormat());
if (workItem.isDeliveryStateProvided()) {
delivery.disposition(workItem.getDeliveryState());
}
workItem.send(sender);
linkAdvance = sender.advance();
} catch (Exception exception) {
sendException = exception;
}
if (linkAdvance) {
logger.atVerbose()
.addKeyValue(DELIVERY_TAG_KEY, deliveryTag)
.log("Sent message.");
workItem.setWaitingForAck();
scheduler.schedule(new SendTimeout(deliveryTag), retryOptions.getTryTimeout().toMillis(),
TimeUnit.MILLISECONDS);
} else {
logger.atVerbose()
.addKeyValue(DELIVERY_TAG_KEY, deliveryTag)
.addKeyValue("sentMessageSize", sentMsgSize)
.addKeyValue("payloadActualSize", workItem.getEncodedMessageSize())
.log("Sendlink advance failed.");
DeliveryState outcome = null;
if (delivery != null) {
outcome = delivery.getRemoteState();
delivery.free();
}
final AmqpErrorContext context = handler.getErrorContext(sender);
final Throwable exception = sendException != null
? new OperationCancelledException(String.format(Locale.US,
"Entity(%s): send operation failed. Please see cause for more details", entityPath),
sendException, context)
: new OperationCancelledException(String.format(Locale.US,
"Entity(%s): send operation failed while advancing delivery(tag: %s).",
entityPath, deliveryTag), context);
workItem.error(exception, outcome);
}
}
}
private void processDeliveredMessage(Delivery delivery) {
final DeliveryState outcome = delivery.getRemoteState();
final String deliveryTag = new String(delivery.getTag(), UTF_8);
logger.atVerbose()
.addKeyValue(DELIVERY_TAG_KEY, deliveryTag)
.log("Process delivered message.");
final RetriableWorkItem workItem = pendingSendsMap.remove(deliveryTag);
if (workItem == null) {
logger.atVerbose()
.addKeyValue(DELIVERY_TAG_KEY, deliveryTag)
.log("Mismatch (or send timed out).");
return;
} else if (workItem.isDeliveryStateProvided()) {
workItem.success(outcome);
return;
}
if (outcome instanceof Accepted) {
synchronized (errorConditionLock) {
lastKnownLinkError = null;
lastKnownErrorReportedAt = null;
retryAttempts.set(0);
}
workItem.success(outcome);
} else if (outcome instanceof Rejected) {
final Rejected rejected = (Rejected) outcome;
final org.apache.qpid.proton.amqp.transport.ErrorCondition error = rejected.getError();
final Exception exception = ExceptionUtil.toException(error.getCondition().toString(),
error.getDescription(), handler.getErrorContext(sender));
logger.atWarning()
.addKeyValue(DELIVERY_TAG_KEY, deliveryTag)
.addKeyValue("rejected", rejected)
.log("Delivery rejected.");
final int retryAttempt;
if (isGeneralSendError(error.getCondition())) {
synchronized (errorConditionLock) {
lastKnownLinkError = exception;
lastKnownErrorReportedAt = Instant.now();
retryAttempt = retryAttempts.incrementAndGet();
}
} else {
retryAttempt = retryAttempts.get();
}
final Duration retryInterval = retry.calculateRetryDelay(exception, retryAttempt);
if (retryInterval == null || retryInterval.compareTo(workItem.getTimeoutTracker().remaining()) > 0) {
cleanupFailedSend(workItem, exception, outcome);
} else {
workItem.setLastKnownException(exception);
try {
reactorProvider.getReactorDispatcher().invoke(() -> sendWork(workItem), retryInterval);
} catch (IOException | RejectedExecutionException schedulerException) {
exception.initCause(schedulerException);
cleanupFailedSend(
workItem,
new AmqpException(false,
String.format(Locale.US, "Entity(%s): send operation failed while scheduling a"
+ " retry on Reactor, see cause for more details.", entityPath),
schedulerException, handler.getErrorContext(sender)),
outcome);
}
}
} else if (outcome instanceof Released) {
cleanupFailedSend(workItem, new OperationCancelledException(outcome.toString(),
handler.getErrorContext(sender)), outcome);
} else if (outcome instanceof Declared) {
final Declared declared = (Declared) outcome;
workItem.success(declared);
} else {
cleanupFailedSend(workItem, new AmqpException(false, outcome.toString(),
handler.getErrorContext(sender)), outcome);
}
}
private void scheduleWorkOnDispatcher() {
try {
reactorProvider.getReactorDispatcher().invoke(this::processSendWork);
} catch (IOException e) {
logger.warning("Error scheduling work on reactor.", e);
} catch (RejectedExecutionException e) {
logger.info("Error scheduling work on reactor because of RejectedExecutionException.");
}
}
private void cleanupFailedSend(final RetriableWorkItem workItem, final Exception exception, final DeliveryState deliveryState) {
workItem.error(exception, deliveryState);
}
private void completeClose() {
isClosedMono.emitEmpty((signalType, result) -> {
addSignalTypeAndResult(logger.atWarning(), signalType, result).log("Unable to emit shutdown signal.");
return false;
});
subscriptions.dispose();
if (tokenManager != null) {
tokenManager.close();
}
}
/**
* Clears pending sends and puts an error in there.
*
* @param error Error to pass to pending sends.
*/
private void handleError(Throwable error) {
synchronized (pendingSendLock) {
if (isDisposed.getAndSet(true)) {
logger.verbose("This was already disposed. Dropping error.");
} else {
logger.atVerbose()
.addKeyValue(PENDING_SENDS_SIZE_KEY, () -> String.valueOf(pendingSendsMap.size()))
.log("Disposing pending sends with error.");
}
pendingSendsMap.forEach((key, value) -> value.error(error, null));
pendingSendsMap.clear();
pendingSendsQueue.clear();
}
completeClose();
}
private void handleClose() {
final String message = String.format("Could not complete sends because link '%s' for '%s' is closed.",
getLinkName(), entityPath);
final AmqpErrorContext context = handler.getErrorContext(sender);
synchronized (pendingSendLock) {
if (isDisposed.getAndSet(true)) {
logger.verbose("This was already disposed.");
} else {
logger.atVerbose()
.addKeyValue(PENDING_SENDS_SIZE_KEY, () -> String.valueOf(pendingSendsMap.size()))
.log("Disposing pending sends.");
}
pendingSendsMap.forEach((key, value) -> value.error(new AmqpException(true, message, context), null));
pendingSendsMap.clear();
pendingSendsQueue.clear();
}
completeClose();
}
private static boolean isGeneralSendError(Symbol amqpError) {
return (amqpError == AmqpErrorCode.SERVER_BUSY_ERROR || amqpError == AmqpErrorCode.TIMEOUT_ERROR
|| amqpError == AmqpErrorCode.RESOURCE_LIMIT_EXCEEDED);
}
private static class WeightedDeliveryTag {
private final String deliveryTag;
private final int priority;
WeightedDeliveryTag(final String deliveryTag, final int priority) {
this.deliveryTag = deliveryTag;
this.priority = priority;
}
private String getDeliveryTag() {
return this.deliveryTag;
}
private int getPriority() {
return this.priority;
}
}
private static class DeliveryTagComparator implements Comparator<WeightedDeliveryTag>, Serializable {
private static final long serialVersionUID = -7057500582037295635L;
@Override
public int compare(WeightedDeliveryTag deliveryTag0, WeightedDeliveryTag deliveryTag1) {
return deliveryTag1.getPriority() - deliveryTag0.getPriority();
}
}
/**
* Keeps track of messages that have been sent, but may not have been acknowledged by the service.
*/
private class SendTimeout implements Runnable {
private final String deliveryTag;
SendTimeout(String deliveryTag) {
this.deliveryTag = deliveryTag;
}
@Override
public void run() {
final RetriableWorkItem workItem = pendingSendsMap.remove(deliveryTag);
if (workItem == null) {
return;
}
Exception cause = lastKnownLinkError;
final Exception lastError;
final Instant lastErrorTime;
synchronized (errorConditionLock) {
lastError = lastKnownLinkError;
lastErrorTime = lastKnownErrorReportedAt;
}
if (lastError != null && lastErrorTime != null) {
final Instant now = Instant.now();
final boolean isLastErrorAfterSleepTime =
lastErrorTime.isAfter(now.minusSeconds(SERVER_BUSY_BASE_SLEEP_TIME_IN_SECS));
final boolean isServerBusy = lastError instanceof AmqpException && isLastErrorAfterSleepTime;
final boolean isLastErrorAfterOperationTimeout =
lastErrorTime.isAfter(now.minus(retryOptions.getTryTimeout()));
cause = isServerBusy || isLastErrorAfterOperationTimeout
? lastError
: null;
}
final AmqpException exception;
if (cause instanceof AmqpException) {
exception = (AmqpException) cause;
} else {
exception = new AmqpException(true, AmqpErrorCondition.TIMEOUT_ERROR,
String.format(Locale.US, "Entity(%s): Send operation timed out", entityPath),
handler.getErrorContext(sender));
}
workItem.error(exception, null);
}
}
} |
Yea the sleep is for making sure the thread is started. I've updated the test to wait for a notification on thread start for each work item. | public void testActiveThreadMetric() throws Exception {
int maxThreads = 5;
int threadExpiration = 60;
BoundedQueueExecutor executor =
new BoundedQueueExecutor(
maxThreads,
threadExpiration,
TimeUnit.SECONDS,
maxThreads,
10000000,
new ThreadFactoryBuilder()
.setNameFormat("DataflowWorkUnits-%d")
.setDaemon(true)
.build());
StreamingDataflowWorker.ComputationState computationState =
new StreamingDataflowWorker.ComputationState(
"computation",
defaultMapTask(Arrays.asList(makeSourceInstruction(StringUtf8Coder.of()))),
executor,
ImmutableMap.of(),
null);
ShardedKey key1Shard1 = ShardedKey.create(ByteString.copyFromUtf8("key1"), 1);
MockActiveWork m1 =
new MockActiveWork(1) {
@Override
public void run() {
int count = 0;
while (!exit) {
count += 1;
}
Thread.currentThread().interrupt();
}
};
MockWork m2 =
new MockWork(2) {
@Override
public void run() {
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
};
MockWork m3 =
new MockWork(3) {
@Override
public void run() {
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
};
assertEquals(0, executor.activeCount());
assertTrue(computationState.activateWork(key1Shard1, m1));
executor.execute(m1, m1.getWorkItem().getSerializedSize());
Thread.sleep(1000);
assertEquals(2, executor.activeCount());
assertTrue(computationState.activateWork(key1Shard1, m2));
assertTrue(computationState.activateWork(key1Shard1, m3));
executor.execute(m2, m2.getWorkItem().getSerializedSize());
executor.execute(m3, m3.getWorkItem().getSerializedSize());
Thread.sleep(1000);
assertEquals(4, executor.activeCount());
m1.stop();
executor.shutdown();
} | Thread.sleep(1000); | public void testActiveThreadMetric() throws Exception {
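    // Reviewer's approach, summarized (illustrative comments, not part of the original source):
    // instead of sleeping and hoping the worker threads have started, each mock work item's
    // processing function grabs the test's monitor and calls notify() as soon as its thread begins
    // running. The test submits the work while holding the same monitor and wait()s once per
    // expected thread start, so executor.activeCount() is asserted only after the threads are
    // known to be active; the shared 'stop' flag then lets the busy-looping work items exit
    // before shutdown.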
int maxThreads = 5;
int threadExpirationSec = 60;
BoundedQueueExecutor executor =
new BoundedQueueExecutor(
maxThreads,
threadExpirationSec,
TimeUnit.SECONDS,
maxThreads,
10000000,
new ThreadFactoryBuilder()
.setNameFormat("DataflowWorkUnits-%d")
.setDaemon(true)
.build());
ComputationState computationState =
new ComputationState(
"computation",
defaultMapTask(Arrays.asList(makeSourceInstruction(StringUtf8Coder.of()))),
executor,
ImmutableMap.of(),
null);
ShardedKey key1Shard1 = ShardedKey.create(ByteString.copyFromUtf8("key1"), 1);
Consumer<Work> sleepProcessWorkFn =
unused -> {
synchronized (this) {
this.notify();
}
int count = 0;
while (!stop) {
count += 1;
}
};
Work m2 = createMockWork(2, sleepProcessWorkFn);
Work m3 = createMockWork(3, sleepProcessWorkFn);
Work m4 = createMockWork(4, sleepProcessWorkFn);
assertEquals(0, executor.activeCount());
assertTrue(computationState.activateWork(key1Shard1, m2));
synchronized (this) {
executor.execute(m2, m2.getWorkItem().getSerializedSize());
this.wait();
this.wait();
}
assertEquals(2, executor.activeCount());
assertTrue(computationState.activateWork(key1Shard1, m3));
assertTrue(computationState.activateWork(key1Shard1, m4));
synchronized (this) {
executor.execute(m3, m3.getWorkItem().getSerializedSize());
this.wait();
}
assertEquals(3, executor.activeCount());
synchronized (this) {
executor.execute(m4, m4.getWorkItem().getSerializedSize());
this.wait();
}
assertEquals(4, executor.activeCount());
stop = true;
executor.shutdown();
} | class MockActiveWork extends StreamingDataflowWorker.Work {
public static volatile boolean exit;
public MockActiveWork(long workToken) {
super(
Windmill.WorkItem.newBuilder().setKey(ByteString.EMPTY).setWorkToken(workToken).build(),
Instant::now,
Collections.emptyList());
exit = false;
}
@Override
public void run() {}
public void stop() {
exit = true;
}
} | class StreamingDataflowWorkerTest {
private static final Logger LOG = LoggerFactory.getLogger(StreamingDataflowWorkerTest.class);
private static final IntervalWindow DEFAULT_WINDOW =
new IntervalWindow(new Instant(1234), Duration.millis(1000));
private static final IntervalWindow WINDOW_AT_ZERO =
new IntervalWindow(new Instant(0), new Instant(1000));
private static final IntervalWindow WINDOW_AT_ONE_SECOND =
new IntervalWindow(new Instant(1000), new Instant(2000));
private static final Coder<IntervalWindow> DEFAULT_WINDOW_CODER = IntervalWindow.getCoder();
private static final Coder<Collection<IntervalWindow>> DEFAULT_WINDOW_COLLECTION_CODER =
CollectionCoder.of(DEFAULT_WINDOW_CODER);
private static final String DEFAULT_COMPUTATION_ID = "computation";
private static final String DEFAULT_MAP_STAGE_NAME = "computation";
private static final String DEFAULT_MAP_SYSTEM_NAME = "computation";
private static final String DEFAULT_OUTPUT_ORIGINAL_NAME = "originalName";
private static final String DEFAULT_OUTPUT_SYSTEM_NAME = "systemName";
private static final String DEFAULT_PARDO_SYSTEM_NAME = "parDo";
private static final String DEFAULT_PARDO_ORIGINAL_NAME = "parDoOriginalName";
private static final String DEFAULT_PARDO_USER_NAME = "parDoUserName";
private static final String DEFAULT_PARDO_STATE_FAMILY = "parDoStateFamily";
private static final String DEFAULT_SOURCE_SYSTEM_NAME = "source";
private static final String DEFAULT_SOURCE_ORIGINAL_NAME = "sourceOriginalName";
private static final String DEFAULT_SINK_SYSTEM_NAME = "sink";
private static final String DEFAULT_SINK_ORIGINAL_NAME = "sinkOriginalName";
private static final String DEFAULT_SOURCE_COMPUTATION_ID = "upstream";
private static final String DEFAULT_KEY_STRING = "key";
private static final long DEFAULT_SHARDING_KEY = 12345;
private static final ByteString DEFAULT_KEY_BYTES = ByteString.copyFromUtf8(DEFAULT_KEY_STRING);
private static final String DEFAULT_DATA_STRING = "data";
private static final String DEFAULT_DESTINATION_STREAM_ID = "out";
private static final Function<GetDataRequest, GetDataResponse> EMPTY_DATA_RESPONDER =
(GetDataRequest request) -> {
GetDataResponse.Builder builder = GetDataResponse.newBuilder();
for (ComputationGetDataRequest compRequest : request.getRequestsList()) {
ComputationGetDataResponse.Builder compBuilder =
builder.addDataBuilder().setComputationId(compRequest.getComputationId());
for (KeyedGetDataRequest keyRequest : compRequest.getRequestsList()) {
KeyedGetDataResponse.Builder keyBuilder =
compBuilder
.addDataBuilder()
.setKey(keyRequest.getKey())
.setShardingKey(keyRequest.getShardingKey());
keyBuilder.addAllValues(keyRequest.getValuesToFetchList());
keyBuilder.addAllBags(keyRequest.getBagsToFetchList());
keyBuilder.addAllWatermarkHolds(keyRequest.getWatermarkHoldsToFetchList());
}
}
return builder.build();
};
private final boolean streamingEngine;
private final Supplier<Long> idGenerator =
new Supplier<Long>() {
private final AtomicLong idGenerator = new AtomicLong(1L);
@Override
public Long get() {
return idGenerator.getAndIncrement();
}
};
@Rule public BlockingFn blockingFn = new BlockingFn();
@Rule public TestRule restoreMDC = new RestoreDataflowLoggingMDC();
@Rule public ErrorCollector errorCollector = new ErrorCollector();
WorkUnitClient mockWorkUnitClient = mock(WorkUnitClient.class);
HotKeyLogger hotKeyLogger = mock(HotKeyLogger.class);
public StreamingDataflowWorkerTest(Boolean streamingEngine) {
this.streamingEngine = streamingEngine;
}
@Parameterized.Parameters(name = "{index}: [streamingEngine={0}]")
public static Iterable<Object[]> data() {
return Arrays.asList(new Object[][] {{false}, {true}});
}
private static CounterUpdate getCounter(Iterable<CounterUpdate> counters, String name) {
for (CounterUpdate counter : counters) {
if (counter.getNameAndKind().getName().equals(name)) {
return counter;
}
}
return null;
}
static Work createMockWork(long workToken) {
return Work.create(
Windmill.WorkItem.newBuilder().setKey(ByteString.EMPTY).setWorkToken(workToken).build(),
Instant::now,
Collections.emptyList(),
work -> {});
}
static Work createMockWork(long workToken, Consumer<Work> processWorkFn) {
return Work.create(
Windmill.WorkItem.newBuilder().setKey(ByteString.EMPTY).setWorkToken(workToken).build(),
Instant::now,
Collections.emptyList(),
processWorkFn);
}
private byte[] intervalWindowBytes(IntervalWindow window) throws Exception {
return CoderUtils.encodeToByteArray(
DEFAULT_WINDOW_COLLECTION_CODER, Collections.singletonList(window));
}
private String keyStringForIndex(int index) {
return DEFAULT_KEY_STRING + index;
}
private String dataStringForIndex(long index) {
return DEFAULT_DATA_STRING + index;
}
private ParallelInstruction makeWindowingSourceInstruction(Coder<?> coder) {
CloudObject timerCloudObject =
CloudObject.forClassName(
"com.google.cloud.dataflow.sdk.util.TimerOrElement$TimerOrElementCoder");
List<CloudObject> component =
Collections.singletonList(CloudObjects.asCloudObject(coder, /* sdkComponents= */ null));
Structs.addList(timerCloudObject, PropertyNames.COMPONENT_ENCODINGS, component);
CloudObject encodedCoder = CloudObject.forClassName("kind:windowed_value");
Structs.addBoolean(encodedCoder, PropertyNames.IS_WRAPPER, true);
Structs.addList(
encodedCoder,
PropertyNames.COMPONENT_ENCODINGS,
ImmutableList.of(
timerCloudObject,
CloudObjects.asCloudObject(IntervalWindowCoder.of(), /* sdkComponents= */ null)));
return new ParallelInstruction()
.setSystemName(DEFAULT_SOURCE_SYSTEM_NAME)
.setOriginalName(DEFAULT_SOURCE_ORIGINAL_NAME)
.setRead(
new ReadInstruction()
.setSource(
new Source()
.setSpec(CloudObject.forClass(WindowingWindmillReader.class))
.setCodec(encodedCoder)))
.setOutputs(
Collections.singletonList(
new InstructionOutput()
.setName(Long.toString(idGenerator.get()))
.setCodec(encodedCoder)
.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)
.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)));
}
private ParallelInstruction makeSourceInstruction(Coder<?> coder) {
return new ParallelInstruction()
.setSystemName(DEFAULT_SOURCE_SYSTEM_NAME)
.setOriginalName(DEFAULT_SOURCE_ORIGINAL_NAME)
.setRead(
new ReadInstruction()
.setSource(
new Source()
.setSpec(CloudObject.forClass(UngroupedWindmillReader.class))
.setCodec(
CloudObjects.asCloudObject(
WindowedValue.getFullCoder(coder, IntervalWindow.getCoder()),
/* sdkComponents= */ null))))
.setOutputs(
Collections.singletonList(
new InstructionOutput()
.setName(Long.toString(idGenerator.get()))
.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)
.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)
.setCodec(
CloudObjects.asCloudObject(
WindowedValue.getFullCoder(coder, IntervalWindow.getCoder()),
/* sdkComponents= */ null))));
}
private ParallelInstruction makeDoFnInstruction(
DoFn<?, ?> doFn,
int producerIndex,
Coder<?> outputCoder,
WindowingStrategy<?, ?> windowingStrategy) {
CloudObject spec = CloudObject.forClassName("DoFn");
addString(
spec,
PropertyNames.SERIALIZED_FN,
StringUtils.byteArrayToJsonString(
SerializableUtils.serializeToByteArray(
DoFnInfo.forFn(
doFn,
windowingStrategy /* windowing strategy */,
null /* side input views */,
null /* input coder */,
new TupleTag<>(PropertyNames.OUTPUT) /* main output id */,
DoFnSchemaInformation.create(),
Collections.emptyMap()))));
return new ParallelInstruction()
.setSystemName(DEFAULT_PARDO_SYSTEM_NAME)
.setName(DEFAULT_PARDO_USER_NAME)
.setOriginalName(DEFAULT_PARDO_ORIGINAL_NAME)
.setParDo(
new ParDoInstruction()
.setInput(
new InstructionInput()
.setProducerInstructionIndex(producerIndex)
.setOutputNum(0))
.setNumOutputs(1)
.setUserFn(spec)
.setMultiOutputInfos(
Collections.singletonList(new MultiOutputInfo().setTag(PropertyNames.OUTPUT))))
.setOutputs(
Collections.singletonList(
new InstructionOutput()
.setName(PropertyNames.OUTPUT)
.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)
.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)
.setCodec(
CloudObjects.asCloudObject(
WindowedValue.getFullCoder(
outputCoder, windowingStrategy.getWindowFn().windowCoder()),
/* sdkComponents= */ null))));
}
private ParallelInstruction makeDoFnInstruction(
DoFn<?, ?> doFn, int producerIndex, Coder<?> outputCoder) {
WindowingStrategy<?, ?> windowingStrategy =
WindowingStrategy.of(FixedWindows.of(Duration.millis(10)));
return makeDoFnInstruction(doFn, producerIndex, outputCoder, windowingStrategy);
}
private ParallelInstruction makeSinkInstruction(
String streamId,
Coder<?> coder,
int producerIndex,
Coder<? extends BoundedWindow> windowCoder) {
CloudObject spec = CloudObject.forClass(WindmillSink.class);
addString(spec, "stream_id", streamId);
return new ParallelInstruction()
.setSystemName(DEFAULT_SINK_SYSTEM_NAME)
.setOriginalName(DEFAULT_SINK_ORIGINAL_NAME)
.setWrite(
new WriteInstruction()
.setInput(
new InstructionInput()
.setProducerInstructionIndex(producerIndex)
.setOutputNum(0))
.setSink(
new Sink()
.setSpec(spec)
.setCodec(
CloudObjects.asCloudObject(
WindowedValue.getFullCoder(coder, windowCoder),
/* sdkComponents= */ null))));
}
private ParallelInstruction makeSinkInstruction(
Coder<?> coder, int producerIndex, Coder<? extends BoundedWindow> windowCoder) {
return makeSinkInstruction(DEFAULT_DESTINATION_STREAM_ID, coder, producerIndex, windowCoder);
}
private ParallelInstruction makeSinkInstruction(Coder<?> coder, int producerIndex) {
return makeSinkInstruction(coder, producerIndex, IntervalWindow.getCoder());
}
/**
* Returns a {@link MapTask} with the provided {@code instructions} and default values everywhere
* else.
*/
private MapTask defaultMapTask(List<ParallelInstruction> instructions) {
MapTask mapTask =
new MapTask()
.setStageName(DEFAULT_MAP_STAGE_NAME)
.setSystemName(DEFAULT_MAP_SYSTEM_NAME)
.setInstructions(instructions);
mapTask.setFactory(Transport.getJsonFactory());
return mapTask;
}
private Windmill.GetWorkResponse buildInput(String input, byte[] metadata) throws Exception {
Windmill.GetWorkResponse.Builder builder = Windmill.GetWorkResponse.newBuilder();
TextFormat.merge(input, builder);
if (metadata != null) {
Windmill.InputMessageBundle.Builder messageBundleBuilder =
builder.getWorkBuilder(0).getWorkBuilder(0).getMessageBundlesBuilder(0);
for (Windmill.Message.Builder messageBuilder :
messageBundleBuilder.getMessagesBuilderList()) {
messageBuilder.setMetadata(addPaneTag(PaneInfo.NO_FIRING, metadata));
}
}
return builder.build();
}
private Windmill.GetWorkResponse buildSessionInput(
int workToken,
long inputWatermark,
long outputWatermark,
List<Long> inputs,
List<Timer> timers)
throws Exception {
Windmill.WorkItem.Builder builder = Windmill.WorkItem.newBuilder();
builder.setKey(DEFAULT_KEY_BYTES);
builder.setShardingKey(DEFAULT_SHARDING_KEY);
builder.setCacheToken(1);
builder.setWorkToken(workToken);
builder.setOutputDataWatermark(outputWatermark * 1000);
if (!inputs.isEmpty()) {
InputMessageBundle.Builder messageBuilder =
Windmill.InputMessageBundle.newBuilder()
.setSourceComputationId(DEFAULT_SOURCE_COMPUTATION_ID);
for (Long input : inputs) {
messageBuilder.addMessages(
Windmill.Message.newBuilder()
.setTimestamp(input)
.setData(ByteString.copyFromUtf8(dataStringForIndex(input)))
.setMetadata(
addPaneTag(
PaneInfo.NO_FIRING,
intervalWindowBytes(
new IntervalWindow(
new Instant(input),
new Instant(input).plus(Duration.millis(10)))))));
}
builder.addMessageBundles(messageBuilder);
}
if (!timers.isEmpty()) {
builder.setTimers(Windmill.TimerBundle.newBuilder().addAllTimers(timers));
}
return Windmill.GetWorkResponse.newBuilder()
.addWork(
Windmill.ComputationWorkItems.newBuilder()
.setComputationId(DEFAULT_COMPUTATION_ID)
.setInputDataWatermark(inputWatermark * 1000)
.addWork(builder))
.build();
}
private Windmill.GetWorkResponse makeInput(int index, long timestamp) throws Exception {
return makeInput(index, timestamp, keyStringForIndex(index), DEFAULT_SHARDING_KEY);
}
private Windmill.GetWorkResponse makeInput(
int index, long timestamp, String key, long shardingKey) throws Exception {
return buildInput(
"work {"
+ " computation_id: \""
+ DEFAULT_COMPUTATION_ID
+ "\""
+ " input_data_watermark: 0"
+ " work {"
+ " key: \""
+ key
+ "\""
+ " sharding_key: "
+ shardingKey
+ " work_token: "
+ index
+ " cache_token: 3"
+ " hot_key_info {"
+ " hot_key_age_usec: 1000000"
+ " }"
+ " message_bundles {"
+ " source_computation_id: \""
+ DEFAULT_SOURCE_COMPUTATION_ID
+ "\""
+ " messages {"
+ " timestamp: "
+ timestamp
+ " data: \"data"
+ index
+ "\""
+ " }"
+ " }"
+ " }"
+ "}",
CoderUtils.encodeToByteArray(
CollectionCoder.of(IntervalWindow.getCoder()),
Collections.singletonList(DEFAULT_WINDOW)));
}
/**
* Returns a {@link org.apache.beam.runners.dataflow.windmill.Windmill.WorkItemCommitRequest}
* builder parsed from the provided text format proto.
*/
private WorkItemCommitRequest.Builder parseCommitRequest(String output) throws Exception {
WorkItemCommitRequest.Builder builder = Windmill.WorkItemCommitRequest.newBuilder();
TextFormat.merge(output, builder);
return builder;
}
/** Sets the metadata of all the contained messages in this WorkItemCommitRequest. */
private WorkItemCommitRequest.Builder setMessagesMetadata(
PaneInfo pane, byte[] windowBytes, WorkItemCommitRequest.Builder builder) throws Exception {
if (windowBytes != null) {
KeyedMessageBundle.Builder bundles = builder.getOutputMessagesBuilder(0).getBundlesBuilder(0);
for (int i = 0; i < bundles.getMessagesCount(); i++) {
bundles.getMessagesBuilder(i).setMetadata(addPaneTag(pane, windowBytes));
}
}
return builder;
}
/** Reset value update timestamps to zero. */
private WorkItemCommitRequest.Builder setValuesTimestamps(WorkItemCommitRequest.Builder builder) {
for (int i = 0; i < builder.getValueUpdatesCount(); i++) {
builder.getValueUpdatesBuilder(i).getValueBuilder().setTimestamp(0);
}
return builder;
}
private WorkItemCommitRequest.Builder makeExpectedOutput(int index, long timestamp)
throws Exception {
return makeExpectedOutput(
index, timestamp, keyStringForIndex(index), DEFAULT_SHARDING_KEY, keyStringForIndex(index));
}
private WorkItemCommitRequest.Builder makeExpectedOutput(
int index, long timestamp, String key, long shardingKey, String outKey) throws Exception {
StringBuilder expectedCommitRequestBuilder =
initializeExpectedCommitRequest(key, shardingKey, index);
appendCommitOutputMessages(expectedCommitRequestBuilder, index, timestamp, outKey);
return setMessagesMetadata(
PaneInfo.NO_FIRING,
intervalWindowBytes(DEFAULT_WINDOW),
parseCommitRequest(expectedCommitRequestBuilder.toString()));
}
private WorkItemCommitRequest removeDynamicFields(WorkItemCommitRequest request) {
return request.toBuilder().clearPerWorkItemLatencyAttributions().build();
}
private WorkItemCommitRequest.Builder makeExpectedTruncationRequestOutput(
int index, String key, long shardingKey, long estimatedSize) throws Exception {
StringBuilder expectedCommitRequestBuilder =
initializeExpectedCommitRequest(key, shardingKey, index, false);
appendCommitTruncationFields(expectedCommitRequestBuilder, estimatedSize);
return parseCommitRequest(expectedCommitRequestBuilder.toString());
}
private StringBuilder initializeExpectedCommitRequest(
String key, long shardingKey, int index, Boolean hasSourceBytesProcessed) {
StringBuilder requestBuilder = new StringBuilder();
requestBuilder.append("key: \"");
requestBuilder.append(key);
requestBuilder.append("\" ");
requestBuilder.append("sharding_key: ");
requestBuilder.append(shardingKey);
requestBuilder.append(" ");
requestBuilder.append("work_token: ");
requestBuilder.append(index);
requestBuilder.append(" ");
requestBuilder.append("cache_token: 3 ");
if (hasSourceBytesProcessed) requestBuilder.append("source_bytes_processed: 0 ");
return requestBuilder;
}
private StringBuilder initializeExpectedCommitRequest(String key, long shardingKey, int index) {
return initializeExpectedCommitRequest(key, shardingKey, index, true);
}
private StringBuilder appendCommitOutputMessages(
StringBuilder requestBuilder, int index, long timestamp, String outKey) {
requestBuilder.append("output_messages {");
requestBuilder.append(" destination_stream_id: \"");
requestBuilder.append(DEFAULT_DESTINATION_STREAM_ID);
requestBuilder.append("\"");
requestBuilder.append(" bundles {");
requestBuilder.append(" key: \"");
requestBuilder.append(outKey);
requestBuilder.append("\"");
requestBuilder.append(" messages {");
requestBuilder.append(" timestamp: ");
requestBuilder.append(timestamp);
requestBuilder.append(" data: \"");
requestBuilder.append(dataStringForIndex(index));
requestBuilder.append("\"");
requestBuilder.append(" metadata: \"\"");
requestBuilder.append(" }");
requestBuilder.append(" messages_ids: \"\"");
requestBuilder.append(" }");
requestBuilder.append("}");
return requestBuilder;
}
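/** Appends the truncation marker and estimated commit size for an oversized work item. */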
private StringBuilder appendCommitTruncationFields(
StringBuilder requestBuilder, long estimatedSize) {
requestBuilder.append("exceeds_max_work_item_commit_bytes: true ");
requestBuilder.append("estimated_work_item_commit_bytes: ");
requestBuilder.append(estimatedSize);
return requestBuilder;
}
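/** Builds a StreamingComputationConfig for the default computation and map stage with the given instructions. */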
private StreamingComputationConfig makeDefaultStreamingComputationConfig(
List<ParallelInstruction> instructions) {
StreamingComputationConfig config = new StreamingComputationConfig();
config.setComputationId(DEFAULT_COMPUTATION_ID);
config.setSystemName(DEFAULT_MAP_SYSTEM_NAME);
config.setStageName(DEFAULT_MAP_STAGE_NAME);
config.setInstructions(instructions);
return config;
}
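/** Encodes the PaneInfo followed by the raw window bytes, matching the metadata attached to output messages. */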
private ByteString addPaneTag(PaneInfo pane, byte[] windowBytes) throws IOException {
ByteStringOutputStream output = new ByteStringOutputStream();
PaneInfo.PaneInfoCoder.INSTANCE.encode(pane, output, Context.OUTER);
output.write(windowBytes);
return output.toByteString();
}
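/** Creates worker options backed by the fake Windmill server, adding the streaming-engine experiment when {@code streamingEngine} is set. */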
private StreamingDataflowWorkerOptions createTestingPipelineOptions(
FakeWindmillServer server, String... args) {
List<String> argsList = Lists.newArrayList(args);
if (streamingEngine) {
argsList.add("--experiments=enable_streaming_engine");
}
StreamingDataflowWorkerOptions options =
PipelineOptionsFactory.fromArgs(argsList.toArray(new String[0]))
.as(StreamingDataflowWorkerOptions.class);
options.setAppName("StreamingWorkerHarnessTest");
options.setJobId("test_job_id");
options.setStreaming(true);
options.setWindmillServerStub(server);
options.setActiveWorkRefreshPeriodMillis(0);
return options;
}
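/** Builds a StreamingDataflowWorker for the given instructions with the default ParDo state-name mapping installed. */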
private StreamingDataflowWorker makeWorker(
List<ParallelInstruction> instructions,
StreamingDataflowWorkerOptions options,
boolean publishCounters,
Supplier<Instant> clock,
Function<String, ScheduledExecutorService> executorSupplier)
throws Exception {
StreamingDataflowWorker worker =
new StreamingDataflowWorker(
Collections.singletonList(defaultMapTask(instructions)),
IntrinsicMapTaskExecutorFactory.defaultFactory(),
mockWorkUnitClient,
options,
publishCounters,
hotKeyLogger,
clock,
executorSupplier);
worker.addStateNameMappings(
ImmutableMap.of(DEFAULT_PARDO_USER_NAME, DEFAULT_PARDO_STATE_FAMILY));
return worker;
}
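/** Convenience overload of {@link #makeWorker} that uses the system clock and a single-threaded scheduled executor. */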
private StreamingDataflowWorker makeWorker(
List<ParallelInstruction> instructions,
StreamingDataflowWorkerOptions options,
boolean publishCounters)
throws Exception {
return makeWorker(
instructions,
options,
publishCounters,
Instant::now,
(threadName) -> Executors.newSingleThreadScheduledExecutor());
}
@Test
public void testBasicHarness() throws Exception {
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(StringUtf8Coder.of()),
makeSinkInstruction(StringUtf8Coder.of(), 0));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);
StreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);
worker.start();
final int numIters = 2000;
for (int i = 0; i < numIters; ++i) {
server.whenGetWorkCalled().thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i)));
}
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(numIters);
worker.stop();
for (int i = 0; i < numIters; ++i) {
assertTrue(result.containsKey((long) i));
assertEquals(
makeExpectedOutput(i, TimeUnit.MILLISECONDS.toMicros(i)).build(),
removeDynamicFields(result.get((long) i)));
}
verify(hotKeyLogger, atLeastOnce()).logHotKeyDetection(nullable(String.class), any());
}
@Test
public void testBasic() throws Exception {
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(StringUtf8Coder.of()),
makeSinkInstruction(StringUtf8Coder.of(), 0));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
server.setIsReady(false);
StreamingConfigTask streamingConfig = new StreamingConfigTask();
streamingConfig.setStreamingComputationConfigs(
ImmutableList.of(makeDefaultStreamingComputationConfig(instructions)));
streamingConfig.setWindmillServiceEndpoint("foo");
WorkItem workItem = new WorkItem();
workItem.setStreamingConfigTask(streamingConfig);
when(mockWorkUnitClient.getGlobalStreamingConfigWorkItem()).thenReturn(Optional.of(workItem));
StreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);
StreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);
worker.start();
final int numIters = 2000;
for (int i = 0; i < numIters; ++i) {
server.whenGetWorkCalled().thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i)));
}
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(numIters);
worker.stop();
for (int i = 0; i < numIters; ++i) {
assertTrue(result.containsKey((long) i));
assertEquals(
makeExpectedOutput(i, TimeUnit.MILLISECONDS.toMicros(i)).build(),
removeDynamicFields(result.get((long) i)));
}
verify(hotKeyLogger, atLeastOnce()).logHotKeyDetection(nullable(String.class), any());
}
@Test
public void testHotKeyLogging() throws Exception {
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of())),
makeSinkInstruction(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()), 0));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
server.setIsReady(false);
StreamingConfigTask streamingConfig = new StreamingConfigTask();
streamingConfig.setStreamingComputationConfigs(
ImmutableList.of(makeDefaultStreamingComputationConfig(instructions)));
streamingConfig.setWindmillServiceEndpoint("foo");
WorkItem workItem = new WorkItem();
workItem.setStreamingConfigTask(streamingConfig);
when(mockWorkUnitClient.getGlobalStreamingConfigWorkItem()).thenReturn(Optional.of(workItem));
StreamingDataflowWorkerOptions options =
createTestingPipelineOptions(server, "--hotKeyLoggingEnabled=true");
StreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);
worker.start();
final int numIters = 2000;
for (int i = 0; i < numIters; ++i) {
server
.whenGetWorkCalled()
.thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i), "key", DEFAULT_SHARDING_KEY));
}
server.waitForAndGetCommits(numIters);
worker.stop();
verify(hotKeyLogger, atLeastOnce())
.logHotKeyDetection(nullable(String.class), any(), eq("key"));
}
@Test
public void testHotKeyLoggingNotEnabled() throws Exception {
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of())),
makeSinkInstruction(KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()), 0));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
server.setIsReady(false);
StreamingConfigTask streamingConfig = new StreamingConfigTask();
streamingConfig.setStreamingComputationConfigs(
ImmutableList.of(makeDefaultStreamingComputationConfig(instructions)));
streamingConfig.setWindmillServiceEndpoint("foo");
WorkItem workItem = new WorkItem();
workItem.setStreamingConfigTask(streamingConfig);
when(mockWorkUnitClient.getGlobalStreamingConfigWorkItem()).thenReturn(Optional.of(workItem));
StreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);
StreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);
worker.start();
final int numIters = 2000;
for (int i = 0; i < numIters; ++i) {
server
.whenGetWorkCalled()
.thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i), "key", DEFAULT_SHARDING_KEY));
}
server.waitForAndGetCommits(numIters);
worker.stop();
verify(hotKeyLogger, atLeastOnce()).logHotKeyDetection(nullable(String.class), any());
}
@Test
public void testIgnoreRetriedKeys() throws Exception {
final int numIters = 4;
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(StringUtf8Coder.of()),
makeDoFnInstruction(blockingFn, 0, StringUtf8Coder.of()),
makeSinkInstruction(StringUtf8Coder.of(), 0));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);
StreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);
worker.start();
for (int i = 0; i < numIters; ++i) {
server
.whenGetWorkCalled()
.thenReturn(
makeInput(
i, TimeUnit.MILLISECONDS.toMicros(i), keyStringForIndex(i), DEFAULT_SHARDING_KEY))
.thenReturn(
makeInput(
i + 1000,
TimeUnit.MILLISECONDS.toMicros(i),
keyStringForIndex(i),
DEFAULT_SHARDING_KEY + 1));
}
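// Wait until all 2 * numIters work items sent above have entered BlockingFn and are blocked.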
BlockingFn.counter.acquire(numIters * 2);
for (int i = 0; i < numIters; ++i) {
server
.whenGetWorkCalled()
.thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i)))
.thenReturn(
makeInput(
i + 1000,
TimeUnit.MILLISECONDS.toMicros(i),
keyStringForIndex(i),
DEFAULT_SHARDING_KEY + 1));
}
server.waitForEmptyWorkQueue();
for (int i = 0; i < numIters; ++i) {
server
.whenGetWorkCalled()
.thenReturn(
makeInput(
i + numIters,
TimeUnit.MILLISECONDS.toMicros(i),
keyStringForIndex(i),
DEFAULT_SHARDING_KEY));
}
server.waitForEmptyWorkQueue();
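// Unblock the work items so processing can complete.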
BlockingFn.blocker.countDown();
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(numIters * 3);
for (int i = 0; i < numIters; ++i) {
assertTrue(result.containsKey((long) i));
assertEquals(
makeExpectedOutput(i, TimeUnit.MILLISECONDS.toMicros(i)).build(),
removeDynamicFields(result.get((long) i)));
assertTrue(result.containsKey((long) i + 1000));
assertEquals(
makeExpectedOutput(
i + 1000,
TimeUnit.MILLISECONDS.toMicros(i),
keyStringForIndex(i),
DEFAULT_SHARDING_KEY + 1,
keyStringForIndex(i))
.build(),
removeDynamicFields(result.get((long) i + 1000)));
assertTrue(result.containsKey((long) i + numIters));
assertEquals(
makeExpectedOutput(
i + numIters,
TimeUnit.MILLISECONDS.toMicros(i),
keyStringForIndex(i),
DEFAULT_SHARDING_KEY,
keyStringForIndex(i))
.build(),
removeDynamicFields(result.get((long) i + numIters)));
}
for (int i = 0; i < numIters; ++i) {
server
.whenGetWorkCalled()
.thenReturn(
makeInput(
i + numIters * 2,
TimeUnit.MILLISECONDS.toMicros(i),
keyStringForIndex(i),
DEFAULT_SHARDING_KEY));
}
result = server.waitForAndGetCommits(numIters);
worker.stop();
for (int i = 0; i < numIters; ++i) {
assertTrue(result.containsKey((long) i + numIters * 2));
assertEquals(
makeExpectedOutput(
i + numIters * 2,
TimeUnit.MILLISECONDS.toMicros(i),
keyStringForIndex(i),
DEFAULT_SHARDING_KEY,
keyStringForIndex(i))
.build(),
removeDynamicFields(result.get((long) i + numIters * 2)));
}
}
@Test(timeout = 10000)
public void testNumberOfWorkerHarnessThreadsIsHonored() throws Exception {
int expectedNumberOfThreads = 5;
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(StringUtf8Coder.of()),
makeDoFnInstruction(blockingFn, 0, StringUtf8Coder.of()),
makeSinkInstruction(StringUtf8Coder.of(), 0));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);
options.setNumberOfWorkerHarnessThreads(expectedNumberOfThreads);
StreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);
worker.start();
for (int i = 0; i < expectedNumberOfThreads * 2; ++i) {
server.whenGetWorkCalled().thenReturn(makeInput(i, TimeUnit.MILLISECONDS.toMicros(i)));
}
BlockingFn.counter.acquire(expectedNumberOfThreads);
if (BlockingFn.counter.tryAcquire(500, TimeUnit.MILLISECONDS)) {
fail(
"Expected number of threads "
+ expectedNumberOfThreads
+ " does not match actual "
+ "number of work items processed concurrently "
+ BlockingFn.callCounter.get()
+ ".");
}
BlockingFn.blocker.countDown();
}
@Test
public void testKeyTokenInvalidException() throws Exception {
if (streamingEngine) {
return;
}
KvCoder<String, String> kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(kvCoder),
makeDoFnInstruction(new KeyTokenInvalidFn(), 0, kvCoder),
makeSinkInstruction(kvCoder, 1));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
server
.whenGetWorkCalled()
.thenReturn(makeInput(0, 0, DEFAULT_KEY_STRING, DEFAULT_SHARDING_KEY));
StreamingDataflowWorker worker =
makeWorker(instructions, createTestingPipelineOptions(server), true /* publishCounters */);
worker.start();
server.waitForEmptyWorkQueue();
server
.whenGetWorkCalled()
.thenReturn(makeInput(1, 0, DEFAULT_KEY_STRING, DEFAULT_SHARDING_KEY));
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(1);
assertEquals(
makeExpectedOutput(1, 0, DEFAULT_KEY_STRING, DEFAULT_SHARDING_KEY, DEFAULT_KEY_STRING)
.build(),
removeDynamicFields(result.get(1L)));
assertEquals(1, result.size());
}
@Test
public void testKeyCommitTooLargeException() throws Exception {
KvCoder<String, String> kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(kvCoder),
makeDoFnInstruction(new LargeCommitFn(), 0, kvCoder),
makeSinkInstruction(kvCoder, 1));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
server.setExpectedExceptionCount(1);
StreamingDataflowWorker worker =
makeWorker(instructions, createTestingPipelineOptions(server), true /* publishCounters */);
worker.setMaxWorkItemCommitBytes(1000);
worker.start();
server
.whenGetWorkCalled()
.thenReturn(makeInput(1, 0, "large_key", DEFAULT_SHARDING_KEY))
.thenReturn(makeInput(2, 0, "key", DEFAULT_SHARDING_KEY));
server.waitForEmptyWorkQueue();
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(1);
assertEquals(2, result.size());
assertEquals(
makeExpectedOutput(2, 0, "key", DEFAULT_SHARDING_KEY, "key").build(),
removeDynamicFields(result.get(2L)));
assertTrue(result.containsKey(1L));
WorkItemCommitRequest largeCommit = result.get(1L);
assertEquals("large_key", largeCommit.getKey().toStringUtf8());
assertEquals(
makeExpectedTruncationRequestOutput(
1, "large_key", DEFAULT_SHARDING_KEY, largeCommit.getEstimatedWorkItemCommitBytes())
.build(),
largeCommit);
assertTrue(largeCommit.getEstimatedWorkItemCommitBytes() > 1000);
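// Spam periodic worker updates so the KeyCommitTooLargeException is reported via work item status.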
int maxTries = 10;
while (--maxTries > 0) {
worker.reportPeriodicWorkerUpdates();
Uninterruptibles.sleepUninterruptibly(1000, TimeUnit.MILLISECONDS);
}
ArgumentCaptor<WorkItemStatus> workItemStatusCaptor =
ArgumentCaptor.forClass(WorkItemStatus.class);
verify(mockWorkUnitClient, atLeast(2)).reportWorkItemStatus(workItemStatusCaptor.capture());
List<WorkItemStatus> capturedStatuses = workItemStatusCaptor.getAllValues();
boolean foundErrors = false;
for (WorkItemStatus status : capturedStatuses) {
if (!status.getErrors().isEmpty()) {
assertFalse(foundErrors);
foundErrors = true;
String errorMessage = status.getErrors().get(0).getMessage();
assertThat(errorMessage, Matchers.containsString("KeyCommitTooLargeException"));
}
}
assertTrue(foundErrors);
}
@Test
public void testKeyChange() throws Exception {
KvCoder<String, String> kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(kvCoder),
makeDoFnInstruction(new ChangeKeysFn(), 0, kvCoder),
makeSinkInstruction(kvCoder, 1));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
for (int i = 0; i < 2; i++) {
server
.whenGetWorkCalled()
.thenReturn(
makeInput(
i, TimeUnit.MILLISECONDS.toMicros(i), keyStringForIndex(i), DEFAULT_SHARDING_KEY))
.thenReturn(
makeInput(
i + 1000,
TimeUnit.MILLISECONDS.toMicros(i),
keyStringForIndex(i),
DEFAULT_SHARDING_KEY + i));
}
StreamingDataflowWorker worker =
makeWorker(instructions, createTestingPipelineOptions(server), true /* publishCounters */);
worker.start();
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(4);
for (int i = 0; i < 2; i++) {
assertTrue(result.containsKey((long) i));
assertEquals(
makeExpectedOutput(
i,
TimeUnit.MILLISECONDS.toMicros(i),
keyStringForIndex(i),
DEFAULT_SHARDING_KEY,
keyStringForIndex(i) + "_data" + i)
.build(),
removeDynamicFields(result.get((long) i)));
assertTrue(result.containsKey((long) i + 1000));
assertEquals(
makeExpectedOutput(
i + 1000,
TimeUnit.MILLISECONDS.toMicros(i),
keyStringForIndex(i),
DEFAULT_SHARDING_KEY + i,
keyStringForIndex(i) + "_data" + (i + 1000))
.build(),
removeDynamicFields(result.get((long) i + 1000)));
}
}
@Test(timeout = 30000)
public void testExceptions() throws Exception {
if (streamingEngine) {
return;
}
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(StringUtf8Coder.of()),
makeDoFnInstruction(new TestExceptionFn(), 0, StringUtf8Coder.of()),
makeSinkInstruction(StringUtf8Coder.of(), 1));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
server.setExpectedExceptionCount(1);
String keyString = keyStringForIndex(0);
server
.whenGetWorkCalled()
.thenReturn(
buildInput(
"work {"
+ " computation_id: \""
+ DEFAULT_COMPUTATION_ID
+ "\""
+ " input_data_watermark: 0"
+ " work {"
+ " key: \""
+ keyString
+ "\""
+ " sharding_key: 1"
+ " work_token: 0"
+ " cache_token: 1"
+ " message_bundles {"
+ " source_computation_id: \""
+ DEFAULT_SOURCE_COMPUTATION_ID
+ "\""
+ " messages {"
+ " timestamp: 0"
+ " data: \"0\""
+ " }"
+ " }"
+ " }"
+ "}",
CoderUtils.encodeToByteArray(
CollectionCoder.of(IntervalWindow.getCoder()),
Collections.singletonList(DEFAULT_WINDOW))));
StreamingDataflowWorker worker =
makeWorker(instructions, createTestingPipelineOptions(server), true /* publishCounters */);
worker.start();
server.waitForEmptyWorkQueue();
int maxTries = 10;
while (maxTries-- > 0 && !worker.workExecutorIsEmpty()) {
Uninterruptibles.sleepUninterruptibly(1000, TimeUnit.MILLISECONDS);
}
assertTrue(worker.workExecutorIsEmpty());
maxTries = 10;
while (maxTries-- > 0) {
worker.reportPeriodicWorkerUpdates();
Uninterruptibles.sleepUninterruptibly(1000, TimeUnit.MILLISECONDS);
}
ArgumentCaptor<WorkItemStatus> workItemStatusCaptor =
ArgumentCaptor.forClass(WorkItemStatus.class);
verify(mockWorkUnitClient, atLeast(1)).reportWorkItemStatus(workItemStatusCaptor.capture());
List<WorkItemStatus> capturedStatuses = workItemStatusCaptor.getAllValues();
boolean foundErrors = false;
int lastUpdateWithoutErrors = 0;
int lastUpdateWithErrors = 0;
for (WorkItemStatus status : capturedStatuses) {
if (status.getErrors().isEmpty()) {
lastUpdateWithoutErrors++;
continue;
}
lastUpdateWithErrors++;
assertFalse(foundErrors);
foundErrors = true;
String stacktrace = status.getErrors().get(0).getMessage();
assertThat(stacktrace, Matchers.containsString("Exception!"));
assertThat(stacktrace, Matchers.containsString("Another exception!"));
assertThat(stacktrace, Matchers.containsString("processElement"));
}
assertTrue(foundErrors);
assertTrue(lastUpdateWithoutErrors > lastUpdateWithErrors);
assertThat(server.getStatsReceived().size(), Matchers.greaterThanOrEqualTo(1));
Windmill.ReportStatsRequest stats = server.getStatsReceived().get(0);
assertEquals(DEFAULT_COMPUTATION_ID, stats.getComputationId());
assertEquals(keyString, stats.getKey().toStringUtf8());
assertEquals(0, stats.getWorkToken());
assertEquals(1, stats.getShardingKey());
}
@Test
public void testAssignWindows() throws Exception {
Duration gapDuration = Duration.standardSeconds(1);
CloudObject spec = CloudObject.forClassName("AssignWindowsDoFn");
SdkComponents sdkComponents = SdkComponents.create();
sdkComponents.registerEnvironment(Environments.JAVA_SDK_HARNESS_ENVIRONMENT);
addString(
spec,
PropertyNames.SERIALIZED_FN,
StringUtils.byteArrayToJsonString(
WindowingStrategyTranslation.toMessageProto(
WindowingStrategy.of(FixedWindows.of(gapDuration)), sdkComponents)
.toByteArray()));
ParallelInstruction addWindowsInstruction =
new ParallelInstruction()
.setSystemName("AssignWindows")
.setName("AssignWindows")
.setOriginalName("AssignWindowsOriginal")
.setParDo(
new ParDoInstruction()
.setInput(new InstructionInput().setProducerInstructionIndex(0).setOutputNum(0))
.setNumOutputs(1)
.setUserFn(spec))
.setOutputs(
Collections.singletonList(
new InstructionOutput()
.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)
.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)
.setName("output")
.setCodec(
CloudObjects.asCloudObject(
WindowedValue.getFullCoder(
StringUtf8Coder.of(), IntervalWindow.getCoder()),
/* sdkComponents= */ null))));
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(StringUtf8Coder.of()),
addWindowsInstruction,
makeSinkInstruction(StringUtf8Coder.of(), 1));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
int timestamp1 = 0;
int timestamp2 = 1000000;
server
.whenGetWorkCalled()
.thenReturn(makeInput(timestamp1, timestamp1))
.thenReturn(makeInput(timestamp2, timestamp2));
StreamingDataflowWorker worker =
makeWorker(instructions, createTestingPipelineOptions(server), false /* publishCounters */);
worker.start();
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(2);
assertThat(
removeDynamicFields(result.get((long) timestamp1)),
equalTo(
setMessagesMetadata(
PaneInfo.NO_FIRING,
intervalWindowBytes(WINDOW_AT_ZERO),
makeExpectedOutput(timestamp1, timestamp1))
.build()));
assertThat(
removeDynamicFields(result.get((long) timestamp2)),
equalTo(
setMessagesMetadata(
PaneInfo.NO_FIRING,
intervalWindowBytes(WINDOW_AT_ONE_SECOND),
makeExpectedOutput(timestamp2, timestamp2))
.build()));
}
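/** Asserts that the commit contains exactly the given output timers, in any order. */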
private void verifyTimers(WorkItemCommitRequest commit, Timer... timers) {
assertThat(commit.getOutputTimersList(), Matchers.containsInAnyOrder(timers));
}
private void verifyHolds(WorkItemCommitRequest commit, WatermarkHold... watermarkHolds) {
assertThat(commit.getWatermarkHoldsList(), Matchers.containsInAnyOrder(watermarkHolds));
}
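/** Builds a watermark timer in the MergeWindows state family. */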
private Timer buildWatermarkTimer(String tagPrefix, long timestampMillis) {
return buildWatermarkTimer(tagPrefix, timestampMillis, false);
}
private Timer buildWatermarkTimer(String tagPrefix, long timestampMillis, boolean delete) {
Timer.Builder builder =
Timer.newBuilder()
.setTag(ByteString.copyFromUtf8(tagPrefix + ":" + timestampMillis))
.setType(Type.WATERMARK)
.setStateFamily("MergeWindows");
if (!delete) {
builder.setTimestamp(timestampMillis * 1000);
builder.setMetadataTimestamp(timestampMillis * 1000);
}
return builder.build();
}
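/** Builds a watermark hold in the MergeWindows state family; a negative timestamp adds no hold value and {@code reset} marks the hold as a reset. */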
private WatermarkHold buildHold(String tag, long timestamp, boolean reset) {
WatermarkHold.Builder builder =
WatermarkHold.newBuilder()
.setTag(ByteString.copyFromUtf8(tag))
.setStateFamily("MergeWindows");
if (reset) {
builder.setReset(true);
}
if (timestamp >= 0) {
builder.addTimestamps(timestamp * 1000);
}
return builder.build();
}
@Test
public void testMergeWindows() throws Exception {
Coder<KV<String, String>> kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());
Coder<WindowedValue<KV<String, String>>> windowedKvCoder =
FullWindowedValueCoder.of(kvCoder, IntervalWindow.getCoder());
KvCoder<String, List<String>> groupedCoder =
KvCoder.of(StringUtf8Coder.of(), ListCoder.of(StringUtf8Coder.of()));
Coder<WindowedValue<KV<String, List<String>>>> windowedGroupedCoder =
FullWindowedValueCoder.of(groupedCoder, IntervalWindow.getCoder());
CloudObject spec = CloudObject.forClassName("MergeWindowsDoFn");
SdkComponents sdkComponents = SdkComponents.create();
sdkComponents.registerEnvironment(Environments.JAVA_SDK_HARNESS_ENVIRONMENT);
addString(
spec,
PropertyNames.SERIALIZED_FN,
StringUtils.byteArrayToJsonString(
WindowingStrategyTranslation.toMessageProto(
WindowingStrategy.of(FixedWindows.of(Duration.standardSeconds(1)))
.withTimestampCombiner(TimestampCombiner.EARLIEST),
sdkComponents)
.toByteArray()));
addObject(
spec,
WorkerPropertyNames.INPUT_CODER,
CloudObjects.asCloudObject(windowedKvCoder, /* sdkComponents= */ null));
ParallelInstruction mergeWindowsInstruction =
new ParallelInstruction()
.setSystemName("MergeWindows-System")
.setName("MergeWindowsStep")
.setOriginalName("MergeWindowsOriginal")
.setParDo(
new ParDoInstruction()
.setInput(new InstructionInput().setProducerInstructionIndex(0).setOutputNum(0))
.setNumOutputs(1)
.setUserFn(spec))
.setOutputs(
Collections.singletonList(
new InstructionOutput()
.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)
.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)
.setName("output")
.setCodec(
CloudObjects.asCloudObject(
windowedGroupedCoder, /* sdkComponents= */ null))));
List<ParallelInstruction> instructions =
Arrays.asList(
makeWindowingSourceInstruction(kvCoder),
mergeWindowsInstruction,
makeSinkInstruction(groupedCoder, 1));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorker worker =
makeWorker(instructions, createTestingPipelineOptions(server), false /* publishCounters */);
Map<String, String> nameMap = new HashMap<>();
nameMap.put("MergeWindowsStep", "MergeWindows");
worker.addStateNameMappings(nameMap);
worker.start();
server
.whenGetWorkCalled()
.thenReturn(
buildInput(
"work {"
+ " computation_id: \""
+ DEFAULT_COMPUTATION_ID
+ "\""
+ " input_data_watermark: 0"
+ " work {"
+ " key: \""
+ DEFAULT_KEY_STRING
+ "\""
+ " sharding_key: "
+ DEFAULT_SHARDING_KEY
+ " cache_token: 1"
+ " work_token: 1"
+ " message_bundles {"
+ " source_computation_id: \""
+ DEFAULT_SOURCE_COMPUTATION_ID
+ "\""
+ " messages {"
+ " timestamp: 0"
+ " data: \""
+ dataStringForIndex(0)
+ "\""
+ " }"
+ " }"
+ " }"
+ "}",
intervalWindowBytes(WINDOW_AT_ZERO)));
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(1);
Iterable<CounterUpdate> counters = worker.buildCounters();
String window = "/gAAAAAAAA-joBw/";
String timerTagPrefix = "/s" + window + "+0";
ByteString bufferTag = ByteString.copyFromUtf8(window + "+ubuf");
ByteString paneInfoTag = ByteString.copyFromUtf8(window + "+upane");
String watermarkDataHoldTag = window + "+uhold";
String watermarkExtraHoldTag = window + "+uextra";
String stateFamily = "MergeWindows";
ByteString bufferData = ByteString.copyFromUtf8("data0");
ByteString outputData =
ByteString.copyFrom(
new byte[] {
(byte) 0xff,
(byte) 0xff,
(byte) 0xff,
(byte) 0xff,
0x01,
0x05,
0x64,
0x61,
0x74,
0x61,
0x30,
0x00
});
long timerTimestamp = 999000L;
WorkItemCommitRequest actualOutput = result.get(1L);
verifyTimers(actualOutput, buildWatermarkTimer(timerTagPrefix, 999));
assertThat(
actualOutput.getBagUpdatesList(),
Matchers.contains(
Matchers.equalTo(
Windmill.TagBag.newBuilder()
.setTag(bufferTag)
.setStateFamily(stateFamily)
.addValues(bufferData)
.build())));
verifyHolds(actualOutput, buildHold(watermarkDataHoldTag, 0, false));
assertEquals(0L, splitIntToLong(getCounter(counters, "WindmillStateBytesRead").getInteger()));
assertEquals(
Windmill.WorkItemCommitRequest.newBuilder(actualOutput)
.clearCounterUpdates()
.clearOutputMessages()
.clearPerWorkItemLatencyAttributions()
.build()
.getSerializedSize(),
splitIntToLong(getCounter(counters, "WindmillStateBytesWritten").getInteger()));
assertEquals(
VarInt.getLength(0L)
+ dataStringForIndex(0).length()
+ addPaneTag(PaneInfo.NO_FIRING, intervalWindowBytes(WINDOW_AT_ZERO)).size()
+ 5L, // proto encoding overhead
splitIntToLong(getCounter(counters, "WindmillShuffleBytesRead").getInteger()));
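// Advance the input watermark past the end of the window and deliver the watermark timer,
// which should fire the window and emit the buffered element.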
Windmill.GetWorkResponse.Builder getWorkResponse = Windmill.GetWorkResponse.newBuilder();
getWorkResponse
.addWorkBuilder()
.setComputationId(DEFAULT_COMPUTATION_ID)
.setInputDataWatermark(timerTimestamp + 1000)
.addWorkBuilder()
.setKey(ByteString.copyFromUtf8(DEFAULT_KEY_STRING))
.setShardingKey(DEFAULT_SHARDING_KEY)
.setWorkToken(2)
.setCacheToken(1)
.getTimersBuilder()
.addTimers(buildWatermarkTimer(timerTagPrefix, timerTimestamp));
server.whenGetWorkCalled().thenReturn(getWorkResponse.build());
long expectedBytesRead = 0L;
Windmill.GetDataResponse.Builder dataResponse = Windmill.GetDataResponse.newBuilder();
Windmill.KeyedGetDataResponse.Builder dataBuilder =
dataResponse
.addDataBuilder()
.setComputationId(DEFAULT_COMPUTATION_ID)
.addDataBuilder()
.setKey(ByteString.copyFromUtf8(DEFAULT_KEY_STRING))
.setShardingKey(DEFAULT_SHARDING_KEY);
dataBuilder
.addBagsBuilder()
.setTag(bufferTag)
.setStateFamily(stateFamily)
.addValues(bufferData);
dataBuilder
.addWatermarkHoldsBuilder()
.setTag(ByteString.copyFromUtf8(watermarkDataHoldTag))
.setStateFamily(stateFamily)
.addTimestamps(0);
dataBuilder
.addWatermarkHoldsBuilder()
.setTag(ByteString.copyFromUtf8(watermarkExtraHoldTag))
.setStateFamily(stateFamily)
.addTimestamps(0);
dataBuilder
.addValuesBuilder()
.setTag(paneInfoTag)
.setStateFamily(stateFamily)
.getValueBuilder()
.setTimestamp(0)
.setData(ByteString.EMPTY);
server.whenGetDataCalled().thenReturn(dataResponse.build());
expectedBytesRead += dataBuilder.build().getSerializedSize();
result = server.waitForAndGetCommits(1);
counters = worker.buildCounters();
actualOutput = result.get(2L);
assertEquals(1, actualOutput.getOutputMessagesCount());
assertEquals(
DEFAULT_DESTINATION_STREAM_ID, actualOutput.getOutputMessages(0).getDestinationStreamId());
assertEquals(
DEFAULT_KEY_STRING,
actualOutput.getOutputMessages(0).getBundles(0).getKey().toStringUtf8());
assertEquals(0, actualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getTimestamp());
assertEquals(
outputData, actualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getData());
ByteString metadata =
actualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getMetadata();
InputStream inStream = metadata.newInput();
assertEquals(
PaneInfo.createPane(true, true, Timing.ON_TIME), PaneInfoCoder.INSTANCE.decode(inStream));
assertEquals(
Collections.singletonList(WINDOW_AT_ZERO),
DEFAULT_WINDOW_COLLECTION_CODER.decode(inStream, Coder.Context.OUTER));
assertThat(
"" + actualOutput.getValueUpdatesList(),
actualOutput.getValueUpdatesList(),
Matchers.contains(
Matchers.equalTo(
Windmill.TagValue.newBuilder()
.setTag(paneInfoTag)
.setStateFamily(stateFamily)
.setValue(
Windmill.Value.newBuilder()
.setTimestamp(Long.MAX_VALUE)
.setData(ByteString.EMPTY))
.build())));
assertThat(
"" + actualOutput.getBagUpdatesList(),
actualOutput.getBagUpdatesList(),
Matchers.contains(
Matchers.equalTo(
Windmill.TagBag.newBuilder()
.setTag(bufferTag)
.setStateFamily(stateFamily)
.setDeleteAll(true)
.build())));
verifyHolds(
actualOutput,
buildHold(watermarkDataHoldTag, -1, true),
buildHold(watermarkExtraHoldTag, -1, true));
assertEquals(
expectedBytesRead,
splitIntToLong(getCounter(counters, "WindmillStateBytesRead").getInteger()));
assertEquals(
Windmill.WorkItemCommitRequest.newBuilder(removeDynamicFields(actualOutput))
.clearCounterUpdates()
.clearOutputMessages()
.build()
.getSerializedSize(),
splitIntToLong(getCounter(counters, "WindmillStateBytesWritten").getInteger()));
assertEquals(0L, splitIntToLong(getCounter(counters, "WindmillShuffleBytesRead").getInteger()));
}
@Test
public void testMergeWindowsCaching() throws Exception {
Coder<KV<String, String>> kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());
Coder<WindowedValue<KV<String, String>>> windowedKvCoder =
FullWindowedValueCoder.of(kvCoder, IntervalWindow.getCoder());
KvCoder<String, List<String>> groupedCoder =
KvCoder.of(StringUtf8Coder.of(), ListCoder.of(StringUtf8Coder.of()));
Coder<WindowedValue<KV<String, List<String>>>> windowedGroupedCoder =
FullWindowedValueCoder.of(groupedCoder, IntervalWindow.getCoder());
CloudObject spec = CloudObject.forClassName("MergeWindowsDoFn");
SdkComponents sdkComponents = SdkComponents.create();
sdkComponents.registerEnvironment(Environments.JAVA_SDK_HARNESS_ENVIRONMENT);
addString(
spec,
PropertyNames.SERIALIZED_FN,
StringUtils.byteArrayToJsonString(
WindowingStrategyTranslation.toMessageProto(
WindowingStrategy.of(FixedWindows.of(Duration.standardSeconds(1)))
.withTimestampCombiner(TimestampCombiner.EARLIEST),
sdkComponents)
.toByteArray()));
addObject(
spec,
WorkerPropertyNames.INPUT_CODER,
CloudObjects.asCloudObject(windowedKvCoder, /* sdkComponents= */ null));
ParallelInstruction mergeWindowsInstruction =
new ParallelInstruction()
.setSystemName("MergeWindows-System")
.setName("MergeWindowsStep")
.setOriginalName("MergeWindowsOriginal")
.setParDo(
new ParDoInstruction()
.setInput(new InstructionInput().setProducerInstructionIndex(0).setOutputNum(0))
.setNumOutputs(1)
.setUserFn(spec))
.setOutputs(
Collections.singletonList(
new InstructionOutput()
.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)
.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)
.setName("output")
.setCodec(
CloudObjects.asCloudObject(
windowedGroupedCoder, /* sdkComponents= */ null))));
List<ParallelInstruction> instructions =
Arrays.asList(
makeWindowingSourceInstruction(kvCoder),
mergeWindowsInstruction,
makeDoFnInstruction(new PassthroughDoFn(), 1, groupedCoder),
makeSinkInstruction(groupedCoder, 2));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorker worker =
makeWorker(instructions, createTestingPipelineOptions(server), false /* publishCounters */);
Map<String, String> nameMap = new HashMap<>();
nameMap.put("MergeWindowsStep", "MergeWindows");
worker.addStateNameMappings(nameMap);
worker.start();
server
.whenGetWorkCalled()
.thenReturn(
buildInput(
"work {"
+ " computation_id: \""
+ DEFAULT_COMPUTATION_ID
+ "\""
+ " input_data_watermark: 0"
+ " work {"
+ " key: \""
+ DEFAULT_KEY_STRING
+ "\""
+ " sharding_key: "
+ DEFAULT_SHARDING_KEY
+ " cache_token: 1"
+ " work_token: 1"
+ " is_new_key: 1"
+ " message_bundles {"
+ " source_computation_id: \""
+ DEFAULT_SOURCE_COMPUTATION_ID
+ "\""
+ " messages {"
+ " timestamp: 0"
+ " data: \""
+ dataStringForIndex(0)
+ "\""
+ " }"
+ " }"
+ " }"
+ "}",
intervalWindowBytes(WINDOW_AT_ZERO)));
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(1);
Iterable<CounterUpdate> counters = worker.buildCounters();
String window = "/gAAAAAAAA-joBw/";
String timerTagPrefix = "/s" + window + "+0";
ByteString bufferTag = ByteString.copyFromUtf8(window + "+ubuf");
ByteString paneInfoTag = ByteString.copyFromUtf8(window + "+upane");
String watermarkDataHoldTag = window + "+uhold";
String watermarkExtraHoldTag = window + "+uextra";
String stateFamily = "MergeWindows";
ByteString bufferData = ByteString.copyFromUtf8("data0");
ByteString outputData =
ByteString.copyFrom(
new byte[] {
(byte) 0xff,
(byte) 0xff,
(byte) 0xff,
(byte) 0xff,
0x01,
0x05,
0x64,
0x61,
0x74,
0x61,
0x30,
0x00
});
long timerTimestamp = 999000L;
WorkItemCommitRequest actualOutput = result.get(1L);
verifyTimers(actualOutput, buildWatermarkTimer(timerTagPrefix, 999));
assertThat(
actualOutput.getBagUpdatesList(),
Matchers.contains(
Matchers.equalTo(
Windmill.TagBag.newBuilder()
.setTag(bufferTag)
.setStateFamily(stateFamily)
.addValues(bufferData)
.build())));
verifyHolds(actualOutput, buildHold(watermarkDataHoldTag, 0, false));
assertEquals(0L, splitIntToLong(getCounter(counters, "WindmillStateBytesRead").getInteger()));
assertEquals(
Windmill.WorkItemCommitRequest.newBuilder(removeDynamicFields(actualOutput))
.clearCounterUpdates()
.clearOutputMessages()
.build()
.getSerializedSize(),
splitIntToLong(getCounter(counters, "WindmillStateBytesWritten").getInteger()));
assertEquals(
VarInt.getLength(0L)
+ dataStringForIndex(0).length()
+ addPaneTag(PaneInfo.NO_FIRING, intervalWindowBytes(WINDOW_AT_ZERO)).size()
+ 5L, // proto encoding overhead
splitIntToLong(getCounter(counters, "WindmillShuffleBytesRead").getInteger()));
Windmill.GetWorkResponse.Builder getWorkResponse = Windmill.GetWorkResponse.newBuilder();
getWorkResponse
.addWorkBuilder()
.setComputationId(DEFAULT_COMPUTATION_ID)
.setInputDataWatermark(timerTimestamp + 1000)
.addWorkBuilder()
.setKey(ByteString.copyFromUtf8(DEFAULT_KEY_STRING))
.setShardingKey(DEFAULT_SHARDING_KEY)
.setWorkToken(2)
.setCacheToken(1)
.getTimersBuilder()
.addTimers(buildWatermarkTimer(timerTagPrefix, timerTimestamp));
server.whenGetWorkCalled().thenReturn(getWorkResponse.build());
long expectedBytesRead = 0L;
Windmill.GetDataResponse.Builder dataResponse = Windmill.GetDataResponse.newBuilder();
Windmill.KeyedGetDataResponse.Builder dataBuilder =
dataResponse
.addDataBuilder()
.setComputationId(DEFAULT_COMPUTATION_ID)
.addDataBuilder()
.setKey(ByteString.copyFromUtf8(DEFAULT_KEY_STRING))
.setShardingKey(DEFAULT_SHARDING_KEY);
dataBuilder
.addWatermarkHoldsBuilder()
.setTag(ByteString.copyFromUtf8(watermarkExtraHoldTag))
.setStateFamily(stateFamily)
.addTimestamps(0);
dataBuilder
.addValuesBuilder()
.setTag(paneInfoTag)
.setStateFamily(stateFamily)
.getValueBuilder()
.setTimestamp(0)
.setData(ByteString.EMPTY);
server.whenGetDataCalled().thenReturn(dataResponse.build());
expectedBytesRead += dataBuilder.build().getSerializedSize();
result = server.waitForAndGetCommits(1);
counters = worker.buildCounters();
actualOutput = result.get(2L);
assertEquals(1, actualOutput.getOutputMessagesCount());
assertEquals(
DEFAULT_DESTINATION_STREAM_ID, actualOutput.getOutputMessages(0).getDestinationStreamId());
assertEquals(
DEFAULT_KEY_STRING,
actualOutput.getOutputMessages(0).getBundles(0).getKey().toStringUtf8());
assertEquals(0, actualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getTimestamp());
assertEquals(
outputData, actualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getData());
ByteString metadata =
actualOutput.getOutputMessages(0).getBundles(0).getMessages(0).getMetadata();
InputStream inStream = metadata.newInput();
assertEquals(
PaneInfo.createPane(true, true, Timing.ON_TIME), PaneInfoCoder.INSTANCE.decode(inStream));
assertEquals(
Collections.singletonList(WINDOW_AT_ZERO),
DEFAULT_WINDOW_COLLECTION_CODER.decode(inStream, Coder.Context.OUTER));
assertThat(
"" + actualOutput.getValueUpdatesList(),
actualOutput.getValueUpdatesList(),
Matchers.contains(
Matchers.equalTo(
Windmill.TagValue.newBuilder()
.setTag(paneInfoTag)
.setStateFamily(stateFamily)
.setValue(
Windmill.Value.newBuilder()
.setTimestamp(Long.MAX_VALUE)
.setData(ByteString.EMPTY))
.build())));
assertThat(
"" + actualOutput.getBagUpdatesList(),
actualOutput.getBagUpdatesList(),
Matchers.contains(
Matchers.equalTo(
Windmill.TagBag.newBuilder()
.setTag(bufferTag)
.setStateFamily(stateFamily)
.setDeleteAll(true)
.build())));
verifyHolds(
actualOutput,
buildHold(watermarkDataHoldTag, -1, true),
buildHold(watermarkExtraHoldTag, -1, true));
assertEquals(
expectedBytesRead,
splitIntToLong(getCounter(counters, "WindmillStateBytesRead").getInteger()));
assertEquals(
Windmill.WorkItemCommitRequest.newBuilder(removeDynamicFields(actualOutput))
.clearCounterUpdates()
.clearOutputMessages()
.build()
.getSerializedSize(),
splitIntToLong(getCounter(counters, "WindmillStateBytesWritten").getInteger()));
assertEquals(0L, splitIntToLong(getCounter(counters, "WindmillShuffleBytesRead").getInteger()));
CacheStats stats = worker.stateCache.getCacheStats();
LOG.info("cache stats {}", stats);
assertEquals(1, stats.hitCount());
assertEquals(4, stats.missCount());
}
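/** Runs a sessions MergeWindows pipeline and, for each action, feeds its GetWork response and verifies the resulting timers and watermark holds. */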
private void runMergeSessionsActions(List<Action> actions) throws Exception {
Coder<KV<String, String>> kvCoder = KvCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of());
Coder<WindowedValue<KV<String, String>>> windowedKvCoder =
FullWindowedValueCoder.of(kvCoder, IntervalWindow.getCoder());
KvCoder<String, List<String>> groupedCoder =
KvCoder.of(StringUtf8Coder.of(), ListCoder.of(StringUtf8Coder.of()));
Coder<WindowedValue<KV<String, List<String>>>> windowedGroupedCoder =
FullWindowedValueCoder.of(groupedCoder, IntervalWindow.getCoder());
CloudObject spec = CloudObject.forClassName("MergeWindowsDoFn");
SdkComponents sdkComponents = SdkComponents.create();
sdkComponents.registerEnvironment(Environments.JAVA_SDK_HARNESS_ENVIRONMENT);
addString(
spec,
PropertyNames.SERIALIZED_FN,
StringUtils.byteArrayToJsonString(
WindowingStrategyTranslation.toMessageProto(
WindowingStrategy.of(Sessions.withGapDuration(Duration.millis(10)))
.withMode(AccumulationMode.DISCARDING_FIRED_PANES)
.withTrigger(
Repeatedly.forever(
AfterWatermark.pastEndOfWindow()
.withLateFirings(AfterPane.elementCountAtLeast(1))))
.withAllowedLateness(Duration.standardMinutes(60)),
sdkComponents)
.toByteArray()));
addObject(
spec,
WorkerPropertyNames.INPUT_CODER,
CloudObjects.asCloudObject(windowedKvCoder, /* sdkComponents= */ null));
ParallelInstruction mergeWindowsInstruction =
new ParallelInstruction()
.setSystemName("MergeWindows-System")
.setName("MergeWindowsStep")
.setOriginalName("MergeWindowsOriginal")
.setParDo(
new ParDoInstruction()
.setInput(new InstructionInput().setProducerInstructionIndex(0).setOutputNum(0))
.setNumOutputs(1)
.setUserFn(spec))
.setOutputs(
Collections.singletonList(
new InstructionOutput()
.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)
.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)
.setName("output")
.setCodec(
CloudObjects.asCloudObject(
windowedGroupedCoder, /* sdkComponents= */ null))));
List<ParallelInstruction> instructions =
Arrays.asList(
makeWindowingSourceInstruction(kvCoder),
mergeWindowsInstruction,
makeSinkInstruction(groupedCoder, 1));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorker worker =
makeWorker(instructions, createTestingPipelineOptions(server), false /* publishCounters */);
Map<String, String> nameMap = new HashMap<>();
nameMap.put("MergeWindowsStep", "MergeWindows");
worker.addStateNameMappings(nameMap);
worker.start();
server.whenGetDataCalled().answerByDefault(EMPTY_DATA_RESPONDER);
for (int i = 0; i < actions.size(); ++i) {
Action action = actions.get(i);
server.whenGetWorkCalled().thenReturn(action.response);
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(1);
WorkItemCommitRequest actualOutput = result.get(i + 1L);
assertThat(actualOutput, Matchers.not(Matchers.nullValue()));
verifyTimers(actualOutput, action.expectedTimers);
verifyHolds(actualOutput, action.expectedHolds);
}
}
@Test
public void testMergeSessionWindows() throws Exception {
runMergeSessionsActions(
Collections.singletonList(
new Action(
buildSessionInput(
1, 40, 0, Collections.singletonList(1L), Collections.EMPTY_LIST))
.withHolds(
buildHold("/gAAAAAAAAAsK/+uhold", -1, true),
buildHold("/gAAAAAAAAAsK/+uextra", -1, true))
.withTimers(buildWatermarkTimer("/s/gAAAAAAAAAsK/+0", 3600010))));
runMergeSessionsActions(
Arrays.asList(
new Action(
buildSessionInput(
1, 0, 0, Collections.singletonList(1L), Collections.EMPTY_LIST))
.withHolds(buildHold("/gAAAAAAAAAsK/+uhold", 10, false))
.withTimers(
buildWatermarkTimer("/s/gAAAAAAAAAsK/+0", 10),
buildWatermarkTimer("/s/gAAAAAAAAAsK/+0", 3600010)),
new Action(
buildSessionInput(
2,
30,
0,
Collections.EMPTY_LIST,
Collections.singletonList(buildWatermarkTimer("/s/gAAAAAAAAAsK/+0", 10))))
.withTimers(buildWatermarkTimer("/s/gAAAAAAAAAsK/+0", 3600010))
.withHolds(
buildHold("/gAAAAAAAAAsK/+uhold", -1, true),
buildHold("/gAAAAAAAAAsK/+uextra", -1, true)),
new Action(
buildSessionInput(
3, 30, 0, Collections.singletonList(8L), Collections.EMPTY_LIST))
.withTimers(
buildWatermarkTimer("/s/gAAAAAAAABIR/+0", 3600017),
buildWatermarkTimer("/s/gAAAAAAAAAsK/+0", 10, true),
buildWatermarkTimer("/s/gAAAAAAAAAsK/+0", 3600010, true))
.withHolds(
buildHold("/gAAAAAAAAAsK/+uhold", -1, true),
buildHold("/gAAAAAAAAAsK/+uextra", -1, true)),
new Action(
buildSessionInput(
4, 30, 0, Collections.singletonList(31L), Collections.EMPTY_LIST))
.withTimers(
buildWatermarkTimer("/s/gAAAAAAAACkK/+0", 3600040),
buildWatermarkTimer("/s/gAAAAAAAACkK/+0", 40))
.withHolds(buildHold("/gAAAAAAAACkK/+uhold", 40, false)),
new Action(buildSessionInput(5, 30, 0, Arrays.asList(17L, 23L), Collections.EMPTY_LIST))
.withTimers(
buildWatermarkTimer("/s/gAAAAAAAACkK/+0", 3600040, true),
buildWatermarkTimer("/s/gAAAAAAAACkK/+0", 40, true),
buildWatermarkTimer("/s/gAAAAAAAABIR/+0", 3600017, true),
buildWatermarkTimer("/s/gAAAAAAAABIR/+0", 17, true),
buildWatermarkTimer("/s/gAAAAAAAACko/+0", 40),
buildWatermarkTimer("/s/gAAAAAAAACko/+0", 3600040))
.withHolds(
buildHold("/gAAAAAAAACkK/+uhold", -1, true),
buildHold("/gAAAAAAAACkK/+uextra", -1, true),
buildHold("/gAAAAAAAAAsK/+uhold", 40, true),
buildHold("/gAAAAAAAAAsK/+uextra", 3600040, true)),
new Action(
buildSessionInput(
6,
50,
0,
Collections.EMPTY_LIST,
Collections.singletonList(buildWatermarkTimer("/s/gAAAAAAAACko/+0", 40))))
.withTimers(buildWatermarkTimer("/s/gAAAAAAAACko/+0", 3600040))
.withHolds(
buildHold("/gAAAAAAAAAsK/+uhold", -1, true),
buildHold("/gAAAAAAAAAsK/+uextra", -1, true))));
}
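/** Builds a pipeline reading from the custom unbounded TestCountingSource, applying the given DoFn and writing to a sink. */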
private List<ParallelInstruction> makeUnboundedSourcePipeline() throws Exception {
return makeUnboundedSourcePipeline(1, new PrintFn());
}
private List<ParallelInstruction> makeUnboundedSourcePipeline(
int numMessagesPerShard,
DoFn<ValueWithRecordId<KV<Integer, Integer>>, String> doFn)
throws Exception {
DataflowPipelineOptions options =
PipelineOptionsFactory.create().as(DataflowPipelineOptions.class);
options.setNumWorkers(1);
CloudObject codec =
CloudObjects.asCloudObject(
WindowedValue.getFullCoder(
ValueWithRecordId.ValueWithRecordIdCoder.of(
KvCoder.of(VarIntCoder.of(), VarIntCoder.of())),
GlobalWindow.Coder.INSTANCE),
/* sdkComponents= */ null);
return Arrays.asList(
new ParallelInstruction()
.setSystemName("Read")
.setOriginalName("OriginalReadName")
.setRead(
new ReadInstruction()
.setSource(
CustomSources.serializeToCloudSource(
new TestCountingSource(numMessagesPerShard), options)
.setCodec(codec)))
.setOutputs(
Collections.singletonList(
new InstructionOutput()
.setName("read_output")
.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)
.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)
.setCodec(codec))),
makeDoFnInstruction(doFn, 0, StringUtf8Coder.of(), WindowingStrategy.globalDefault()),
makeSinkInstruction(StringUtf8Coder.of(), 1, GlobalWindow.Coder.INSTANCE));
}
@Test
public void testUnboundedSources() throws Exception {
List<Integer> finalizeTracker = Lists.newArrayList();
TestCountingSource.setFinalizeTracker(finalizeTracker);
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorker worker =
makeWorker(
makeUnboundedSourcePipeline(),
createTestingPipelineOptions(server),
false /* publishCounters */);
worker.start();
server
.whenGetWorkCalled()
.thenReturn(
buildInput(
"work {"
+ " computation_id: \"computation\""
+ " input_data_watermark: 0"
+ " work {"
+ " key: \"0000000000000001\""
+ " sharding_key: 1"
+ " work_token: 1"
+ " cache_token: 1"
+ " }"
+ "}",
null));
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(1);
Iterable<CounterUpdate> counters = worker.buildCounters();
Windmill.WorkItemCommitRequest commit = result.get(1L);
UnsignedLong finalizeId =
UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));
assertThat(
removeDynamicFields(commit),
equalTo(
setMessagesMetadata(
PaneInfo.NO_FIRING,
CoderUtils.encodeToByteArray(
CollectionCoder.of(GlobalWindow.Coder.INSTANCE),
Collections.singletonList(GlobalWindow.INSTANCE)),
parseCommitRequest(
"key: \"0000000000000001\" "
+ "sharding_key: 1 "
+ "work_token: 1 "
+ "cache_token: 1 "
+ "source_backlog_bytes: 7 "
+ "source_bytes_processed: 18 "
+ "output_messages {"
+ " destination_stream_id: \"out\""
+ " bundles {"
+ " key: \"0000000000000001\""
+ " messages {"
+ " timestamp: 0"
+ " data: \"0:0\""
+ " }"
+ " messages_ids: \"\""
+ " }"
+ "} "
+ "source_state_updates {"
+ " state: \"\000\""
+ " finalize_ids: "
+ finalizeId
+ "} "
+ "source_watermark: 1000"))
.build()));
server
.whenGetWorkCalled()
.thenReturn(
buildInput(
"work {"
+ " computation_id: \"computation\""
+ " input_data_watermark: 0"
+ " work {"
+ " key: \"0000000000000001\""
+ " sharding_key: 1"
+ " work_token: 2"
+ " cache_token: 1"
+ " source_state {"
+ " state: \"\001\""
+ " finalize_ids: "
+ finalizeId
+ " } "
+ " }"
+ "}",
null));
result = server.waitForAndGetCommits(1);
counters = worker.buildCounters();
commit = result.get(2L);
finalizeId = UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));
assertThat(
removeDynamicFields(commit),
equalTo(
parseCommitRequest(
"key: \"0000000000000001\" "
+ "sharding_key: 1 "
+ "work_token: 2 "
+ "cache_token: 1 "
+ "source_backlog_bytes: 7 "
+ "source_bytes_processed: 0 "
+ "source_state_updates {"
+ " state: \"\000\""
+ " finalize_ids: "
+ finalizeId
+ "} "
+ "source_watermark: 1000")
.build()));
assertThat(finalizeTracker, contains(0));
assertNull(getCounter(counters, "dataflow_input_size-computation"));
server
.whenGetWorkCalled()
.thenReturn(
buildInput(
"work {"
+ " computation_id: \"computation\""
+ " input_data_watermark: 0"
+ " work {"
+ " key: \"0000000000000002\""
+ " sharding_key: 2"
+ " work_token: 3"
+ " cache_token: 2"
+ " source_state {"
+ " state: \"\000\""
+ " } "
+ " }"
+ "}",
null));
result = server.waitForAndGetCommits(1);
counters = worker.buildCounters();
commit = result.get(3L);
finalizeId = UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));
assertThat(
removeDynamicFields(commit),
equalTo(
parseCommitRequest(
"key: \"0000000000000002\" "
+ "sharding_key: 2 "
+ "work_token: 3 "
+ "cache_token: 2 "
+ "source_backlog_bytes: 7 "
+ "source_bytes_processed: 0 "
+ "source_state_updates {"
+ " state: \"\000\""
+ " finalize_ids: "
+ finalizeId
+ "} "
+ "source_watermark: 1000")
.build()));
assertNull(getCounter(counters, "dataflow_input_size-computation"));
}
@Test
public void testUnboundedSourcesDrain() throws Exception {
List<Integer> finalizeTracker = Lists.newArrayList();
TestCountingSource.setFinalizeTracker(finalizeTracker);
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorker worker =
makeWorker(
makeUnboundedSourcePipeline(),
createTestingPipelineOptions(server),
true /* publishCounters */);
worker.start();
server
.whenGetWorkCalled()
.thenReturn(
buildInput(
"work {"
+ " computation_id: \"computation\""
+ " input_data_watermark: 0"
+ " work {"
+ " key: \"0000000000000001\""
+ " sharding_key: 1"
+ " work_token: 2"
+ " cache_token: 3"
+ " }"
+ "}",
null));
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(1);
Windmill.WorkItemCommitRequest commit = result.get(2L);
UnsignedLong finalizeId =
UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));
assertThat(
removeDynamicFields(commit),
equalTo(
setMessagesMetadata(
PaneInfo.NO_FIRING,
CoderUtils.encodeToByteArray(
CollectionCoder.of(GlobalWindow.Coder.INSTANCE),
Collections.singletonList(GlobalWindow.INSTANCE)),
parseCommitRequest(
"key: \"0000000000000001\" "
+ "sharding_key: 1 "
+ "work_token: 2 "
+ "cache_token: 3 "
+ "source_backlog_bytes: 7 "
+ "source_bytes_processed: 18 "
+ "output_messages {"
+ " destination_stream_id: \"out\""
+ " bundles {"
+ " key: \"0000000000000001\""
+ " messages {"
+ " timestamp: 0"
+ " data: \"0:0\""
+ " }"
+ " messages_ids: \"\""
+ " }"
+ "} "
+ "source_state_updates {"
+ " state: \"\000\""
+ " finalize_ids: "
+ finalizeId
+ "} "
+ "source_watermark: 1000"))
.build()));
server
.whenGetWorkCalled()
.thenReturn(
buildInput(
"work {"
+ " computation_id: \"computation\""
+ " input_data_watermark: 0"
+ " work {"
+ " key: \"0000000000000001\""
+ " sharding_key: 1"
+ " work_token: 3"
+ " cache_token: 3"
+ " source_state {"
+ " only_finalize: true"
+ " finalize_ids: "
+ finalizeId
+ " }"
+ " }"
+ "}",
null));
result = server.waitForAndGetCommits(1);
commit = result.get(3L);
assertThat(
commit,
equalTo(
parseCommitRequest(
"key: \"0000000000000001\" "
+ "sharding_key: 1 "
+ "work_token: 3 "
+ "cache_token: 3 "
+ "source_state_updates {"
+ " only_finalize: true"
+ "} ")
.build()));
assertThat(finalizeTracker, contains(0));
}
@Test
public void testUnboundedSourceWorkRetry() throws Exception {
List<Integer> finalizeTracker = Lists.newArrayList();
TestCountingSource.setFinalizeTracker(finalizeTracker);
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);
options.setWorkerCacheMb(0);
StreamingDataflowWorker worker =
makeWorker(makeUnboundedSourcePipeline(), options, false /* publishCounters */);
worker.start();
Windmill.GetWorkResponse work =
buildInput(
"work {"
+ " computation_id: \"computation\""
+ " input_data_watermark: 0"
+ " work {"
+ " key: \"0000000000000001\""
+ " sharding_key: 1"
+ " work_token: 1"
+ " cache_token: 1"
+ " }"
+ "}",
null);
server.whenGetWorkCalled().thenReturn(work);
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(1);
Iterable<CounterUpdate> counters = worker.buildCounters();
Windmill.WorkItemCommitRequest commit = result.get(1L);
UnsignedLong finalizeId =
UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));
Windmill.WorkItemCommitRequest expectedCommit =
setMessagesMetadata(
PaneInfo.NO_FIRING,
CoderUtils.encodeToByteArray(
CollectionCoder.of(GlobalWindow.Coder.INSTANCE),
Collections.singletonList(GlobalWindow.INSTANCE)),
parseCommitRequest(
"key: \"0000000000000001\" "
+ "sharding_key: 1 "
+ "work_token: 1 "
+ "cache_token: 1 "
+ "source_backlog_bytes: 7 "
+ "source_bytes_processed: 18 "
+ "output_messages {"
+ " destination_stream_id: \"out\""
+ " bundles {"
+ " key: \"0000000000000001\""
+ " messages {"
+ " timestamp: 0"
+ " data: \"0:0\""
+ " }"
+ " messages_ids: \"\""
+ " }"
+ "} "
+ "source_state_updates {"
+ " state: \"\000\""
+ " finalize_ids: "
+ finalizeId
+ "} "
+ "source_watermark: 1000"))
.build();
assertThat(removeDynamicFields(commit), equalTo(expectedCommit));
server.clearCommitsReceived();
server.whenGetWorkCalled().thenReturn(work);
result = server.waitForAndGetCommits(1);
commit = result.get(1L);
finalizeId = UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));
Windmill.WorkItemCommitRequest.Builder commitBuilder = expectedCommit.toBuilder();
commitBuilder
.getSourceStateUpdatesBuilder()
.setFinalizeIds(0, commit.getSourceStateUpdates().getFinalizeIds(0));
expectedCommit = commitBuilder.build();
assertThat(removeDynamicFields(commit), equalTo(expectedCommit));
server
.whenGetWorkCalled()
.thenReturn(
buildInput(
"work {"
+ " computation_id: \"computation\""
+ " input_data_watermark: 0"
+ " work {"
+ " key: \"0000000000000001\""
+ " sharding_key: 1"
+ " work_token: 2"
+ " cache_token: 1"
+ " source_state {"
+ " state: \"\001\""
+ " finalize_ids: "
+ finalizeId
+ " } "
+ " }"
+ "}",
null));
result = server.waitForAndGetCommits(1);
commit = result.get(2L);
finalizeId = UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));
assertThat(
removeDynamicFields(commit),
equalTo(
parseCommitRequest(
"key: \"0000000000000001\" "
+ "sharding_key: 1 "
+ "work_token: 2 "
+ "cache_token: 1 "
+ "source_backlog_bytes: 7 "
+ "source_bytes_processed: 0 "
+ "source_state_updates {"
+ " state: \"\000\""
+ " finalize_ids: "
+ finalizeId
+ "} "
+ "source_watermark: 1000")
.build()));
assertThat(finalizeTracker, contains(0));
}
@Test
public void testActiveWork() throws Exception {
BoundedQueueExecutor mockExecutor = Mockito.mock(BoundedQueueExecutor.class);
ComputationState computationState =
new ComputationState(
"computation",
defaultMapTask(Collections.singletonList(makeSourceInstruction(StringUtf8Coder.of()))),
mockExecutor,
ImmutableMap.of(),
null);
ShardedKey key1 = ShardedKey.create(ByteString.copyFromUtf8("key1"), 1);
ShardedKey key2 = ShardedKey.create(ByteString.copyFromUtf8("key2"), 2);
Work m1 = createMockWork(1);
assertTrue(computationState.activateWork(key1, m1));
Mockito.verify(mockExecutor).execute(m1, m1.getWorkItem().getSerializedSize());
computationState.completeWorkAndScheduleNextWorkForKey(key1, 1);
Mockito.verifyNoMoreInteractions(mockExecutor);
Work m2 = createMockWork(2);
assertTrue(computationState.activateWork(key1, m2));
Mockito.verify(mockExecutor).execute(m2, m2.getWorkItem().getSerializedSize());
Work m3 = createMockWork(3);
assertTrue(computationState.activateWork(key1, m3));
Mockito.verifyNoMoreInteractions(mockExecutor);
Work m4 = createMockWork(4);
assertTrue(computationState.activateWork(key2, m4));
Mockito.verify(mockExecutor).execute(m4, m4.getWorkItem().getSerializedSize());
computationState.completeWorkAndScheduleNextWorkForKey(key2, 4);
Mockito.verifyNoMoreInteractions(mockExecutor);
computationState.completeWorkAndScheduleNextWorkForKey(key1, 2);
Mockito.verify(mockExecutor).forceExecute(m3, m3.getWorkItem().getSerializedSize());
computationState.completeWorkAndScheduleNextWorkForKey(key1, 3);
Mockito.verifyNoMoreInteractions(mockExecutor);
Work m5 = createMockWork(5);
computationState.activateWork(key1, m5);
Mockito.verify(mockExecutor).execute(m5, m5.getWorkItem().getSerializedSize());
assertFalse(computationState.activateWork(key1, m5));
Mockito.verifyNoMoreInteractions(mockExecutor);
computationState.completeWorkAndScheduleNextWorkForKey(key1, 5);
Mockito.verifyNoMoreInteractions(mockExecutor);
}
@Test
public void testActiveWorkForShardedKeys() throws Exception {
BoundedQueueExecutor mockExecutor = Mockito.mock(BoundedQueueExecutor.class);
ComputationState computationState =
new ComputationState(
"computation",
defaultMapTask(Collections.singletonList(makeSourceInstruction(StringUtf8Coder.of()))),
mockExecutor,
ImmutableMap.of(),
null);
ShardedKey key1Shard1 = ShardedKey.create(ByteString.copyFromUtf8("key1"), 1);
ShardedKey key1Shard2 = ShardedKey.create(ByteString.copyFromUtf8("key1"), 2);
Work m1 = createMockWork(1);
assertTrue(computationState.activateWork(key1Shard1, m1));
Mockito.verify(mockExecutor).execute(m1, m1.getWorkItem().getSerializedSize());
computationState.completeWorkAndScheduleNextWorkForKey(key1Shard1, 1);
Mockito.verifyNoMoreInteractions(mockExecutor);
Work m2 = createMockWork(2);
assertTrue(computationState.activateWork(key1Shard1, m2));
Mockito.verify(mockExecutor).execute(m2, m2.getWorkItem().getSerializedSize());
Work m3 = createMockWork(3);
assertTrue(computationState.activateWork(key1Shard1, m3));
Mockito.verifyNoMoreInteractions(mockExecutor);
Work m4 = createMockWork(3);
assertFalse(computationState.activateWork(key1Shard1, m4));
Mockito.verifyNoMoreInteractions(mockExecutor);
assertTrue(computationState.activateWork(key1Shard2, m4));
Mockito.verify(mockExecutor).execute(m4, m4.getWorkItem().getSerializedSize());
assertFalse(computationState.activateWork(key1Shard2, m4));
computationState.completeWorkAndScheduleNextWorkForKey(key1Shard2, 3);
Mockito.verifyNoMoreInteractions(mockExecutor);
}
@Test
@Ignore
public void testMaxThreadMetric() throws Exception {
int maxThreads = 2;
int threadExpiration = 60;
BoundedQueueExecutor executor =
new BoundedQueueExecutor(
maxThreads,
threadExpiration,
TimeUnit.SECONDS,
maxThreads,
10000000,
new ThreadFactoryBuilder()
.setNameFormat("DataflowWorkUnits-%d")
.setDaemon(true)
.build());
ComputationState computationState =
new ComputationState(
"computation",
defaultMapTask(Collections.singletonList(makeSourceInstruction(StringUtf8Coder.of()))),
executor,
ImmutableMap.of(),
null);
ShardedKey key1Shard1 = ShardedKey.create(ByteString.copyFromUtf8("key1"), 1);
Consumer<Work> sleepProcessWorkFn =
unused -> {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
};
Work m2 = createMockWork(2, sleepProcessWorkFn);
Work m3 = createMockWork(3, sleepProcessWorkFn);
assertTrue(computationState.activateWork(key1Shard1, m2));
assertTrue(computationState.activateWork(key1Shard1, m3));
executor.execute(m2, m2.getWorkItem().getSerializedSize());
executor.execute(m3, m3.getWorkItem().getSerializedSize());
long i = 990L;
assertTrue(executor.allThreadsActiveTime() >= i);
executor.shutdown();
}
volatile boolean stop = false;
@Test
public void testExceptionInvalidatesCache() throws Exception {
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
server.setExpectedExceptionCount(2);
DataflowPipelineOptions options = createTestingPipelineOptions(server);
options.setNumWorkers(1);
DataflowPipelineDebugOptions debugOptions = options.as(DataflowPipelineDebugOptions.class);
debugOptions.setUnboundedReaderMaxElements(1);
CloudObject codec =
CloudObjects.asCloudObject(
WindowedValue.getFullCoder(
ValueWithRecordId.ValueWithRecordIdCoder.of(
KvCoder.of(VarIntCoder.of(), VarIntCoder.of())),
GlobalWindow.Coder.INSTANCE),
/* sdkComponents= */ null);
TestCountingSource counter = new TestCountingSource(3).withThrowOnFirstSnapshot(true);
List<ParallelInstruction> instructions =
Arrays.asList(
new ParallelInstruction()
.setOriginalName("OriginalReadName")
.setSystemName("Read")
.setName(DEFAULT_PARDO_USER_NAME)
.setRead(
new ReadInstruction()
.setSource(
CustomSources.serializeToCloudSource(counter, options).setCodec(codec)))
.setOutputs(
Collections.singletonList(
new InstructionOutput()
.setName("read_output")
.setOriginalName(DEFAULT_OUTPUT_ORIGINAL_NAME)
.setSystemName(DEFAULT_OUTPUT_SYSTEM_NAME)
.setCodec(codec))),
makeDoFnInstruction(
new TestExceptionInvalidatesCacheFn(),
0,
StringUtf8Coder.of(),
WindowingStrategy.globalDefault()),
makeSinkInstruction(StringUtf8Coder.of(), 1, GlobalWindow.Coder.INSTANCE));
StreamingDataflowWorker worker =
makeWorker(
instructions,
options.as(StreamingDataflowWorkerOptions.class),
true /* publishCounters */);
worker.setRetryLocallyDelayMs(100);
worker.start();
for (int i = 0; i < 3; i++) {
ByteString state;
if (i == 0 || i == 1) {
state = ByteString.EMPTY;
} else {
state = ByteString.copyFrom(new byte[] {42});
}
Windmill.GetDataResponse.Builder dataResponse = Windmill.GetDataResponse.newBuilder();
dataResponse
.addDataBuilder()
.setComputationId(DEFAULT_COMPUTATION_ID)
.addDataBuilder()
.setKey(ByteString.copyFromUtf8("0000000000000001"))
.setShardingKey(1)
.addValuesBuilder()
.setTag(ByteString.copyFromUtf8("
.setStateFamily(DEFAULT_PARDO_STATE_FAMILY)
.getValueBuilder()
.setTimestamp(0)
.setData(state);
server.whenGetDataCalled().thenReturn(dataResponse.build());
}
for (int i = 0; i < 3; i++) {
StringBuilder sb = new StringBuilder();
sb.append("work {\n");
sb.append(" computation_id: \"computation\"\n");
sb.append(" input_data_watermark: 0\n");
sb.append(" work {\n");
sb.append(" key: \"0000000000000001\"\n");
sb.append(" sharding_key: 1\n");
sb.append(" work_token: ");
sb.append(i);
sb.append(" cache_token: 1");
sb.append("\n");
if (i > 0) {
int previousCheckpoint = i - 1;
sb.append(" source_state {\n");
sb.append(" state: \"");
sb.append((char) previousCheckpoint);
sb.append("\"\n");
sb.append(" }\n");
}
sb.append(" }\n");
sb.append("}\n");
server.whenGetWorkCalled().thenReturn(buildInput(sb.toString(), null));
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(1);
Windmill.WorkItemCommitRequest commit = result.get((long) i);
UnsignedLong finalizeId =
UnsignedLong.fromLongBits(commit.getSourceStateUpdates().getFinalizeIds(0));
sb = new StringBuilder();
sb.append("key: \"0000000000000001\"\n");
sb.append("sharding_key: 1\n");
sb.append("work_token: ");
sb.append(i);
sb.append("\n");
sb.append("cache_token: 1\n");
sb.append("output_messages {\n");
sb.append(" destination_stream_id: \"out\"\n");
sb.append(" bundles {\n");
sb.append(" key: \"0000000000000001\"\n");
int messageNum = i;
sb.append(" messages {\n");
sb.append(" timestamp: ");
sb.append(messageNum * 1000);
sb.append("\n");
sb.append(" data: \"0:");
sb.append(messageNum);
sb.append("\"\n");
sb.append(" }\n");
sb.append(" messages_ids: \"\"\n");
sb.append(" }\n");
sb.append("}\n");
if (i == 0) {
sb.append("value_updates {\n");
sb.append(" tag: \"
sb.append(" value {\n");
sb.append(" timestamp: 0\n");
sb.append(" data: \"");
sb.append((char) 42);
sb.append("\"\n");
sb.append(" }\n");
sb.append(" state_family: \"parDoStateFamily\"\n");
sb.append("}\n");
}
int sourceState = i;
sb.append("source_state_updates {\n");
sb.append(" state: \"");
sb.append((char) sourceState);
sb.append("\"\n");
sb.append(" finalize_ids: ");
sb.append(finalizeId);
sb.append("}\n");
sb.append("source_watermark: ");
sb.append((sourceState + 1) * 1000);
sb.append("\n");
sb.append("source_backlog_bytes: 7\n");
assertThat(
setValuesTimestamps(
removeDynamicFields(commit)
.toBuilder()
.clearOutputTimers()
.clearSourceBytesProcessed())
.build(),
equalTo(
setMessagesMetadata(
PaneInfo.NO_FIRING,
CoderUtils.encodeToByteArray(
CollectionCoder.of(GlobalWindow.Coder.INSTANCE),
ImmutableList.of(GlobalWindow.INSTANCE)),
parseCommitRequest(sb.toString()))
.build()));
}
}
@Test
public void testHugeCommits() throws Exception {
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(StringUtf8Coder.of()),
makeDoFnInstruction(new FanoutFn(), 0, StringUtf8Coder.of()),
makeSinkInstruction(StringUtf8Coder.of(), 0));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);
StreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);
worker.start();
server.whenGetWorkCalled().thenReturn(makeInput(0, TimeUnit.MILLISECONDS.toMicros(0)));
server.waitForAndGetCommits(0);
worker.stop();
}
@Test
public void testActiveWorkRefresh() throws Exception {
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(StringUtf8Coder.of()),
makeDoFnInstruction(new SlowDoFn(), 0, StringUtf8Coder.of()),
makeSinkInstruction(StringUtf8Coder.of(), 0));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);
options.setActiveWorkRefreshPeriodMillis(100);
StreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);
worker.start();
server.whenGetWorkCalled().thenReturn(makeInput(0, TimeUnit.MILLISECONDS.toMicros(0)));
server.waitForAndGetCommits(1);
worker.stop();
assertThat(server.numGetDataRequests(), greaterThan(0));
}
@Test
public void testLatencyAttributionProtobufsPopulated() {
FakeClock clock = new FakeClock();
Work work = Work.create(null, clock, Collections.emptyList(), unused -> {});
clock.sleep(Duration.millis(10));
work.setState(Work.State.PROCESSING);
clock.sleep(Duration.millis(20));
work.setState(Work.State.READING);
clock.sleep(Duration.millis(30));
work.setState(Work.State.PROCESSING);
clock.sleep(Duration.millis(40));
work.setState(Work.State.COMMIT_QUEUED);
clock.sleep(Duration.millis(50));
work.setState(Work.State.COMMITTING);
clock.sleep(Duration.millis(60));
Iterator<LatencyAttribution> it = work.getLatencyAttributions().iterator();
assertTrue(it.hasNext());
LatencyAttribution lat = it.next();
assertSame(State.QUEUED, lat.getState());
assertEquals(10, lat.getTotalDurationMillis());
assertTrue(it.hasNext());
lat = it.next();
assertSame(State.ACTIVE, lat.getState());
assertEquals(60, lat.getTotalDurationMillis());
assertTrue(it.hasNext());
lat = it.next();
assertSame(State.READING, lat.getState());
assertEquals(30, lat.getTotalDurationMillis());
assertTrue(it.hasNext());
lat = it.next();
assertSame(State.COMMITTING, lat.getState());
assertEquals(110, lat.getTotalDurationMillis());
assertFalse(it.hasNext());
}
@Test
public void testLatencyAttributionToQueuedState() throws Exception {
final int workToken = 3232;
FakeClock clock = new FakeClock();
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(StringUtf8Coder.of()),
makeDoFnInstruction(
new FakeSlowDoFn(clock, Duration.millis(1000)), 0, StringUtf8Coder.of()),
makeSinkInstruction(StringUtf8Coder.of(), 0));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);
options.setActiveWorkRefreshPeriodMillis(100);
options.setNumberOfWorkerHarnessThreads(1);
StreamingDataflowWorker worker =
makeWorker(
instructions,
options,
false /* publishCounters */,
clock,
clock::newFakeScheduledExecutor);
worker.start();
ActiveWorkRefreshSink awrSink = new ActiveWorkRefreshSink(EMPTY_DATA_RESPONDER);
server.whenGetDataCalled().answerByDefault(awrSink::getData).delayEachResponseBy(Duration.ZERO);
server
.whenGetWorkCalled()
.thenReturn(makeInput(workToken + 1, 0 /* timestamp */))
.thenReturn(makeInput(workToken, 1 /* timestamp */));
server.waitForAndGetCommits(2);
worker.stop();
assertEquals(
awrSink.getLatencyAttributionDuration(workToken, State.QUEUED), Duration.millis(1000));
assertEquals(awrSink.getLatencyAttributionDuration(workToken + 1, State.QUEUED), Duration.ZERO);
}
@Test
public void testLatencyAttributionToActiveState() throws Exception {
final int workToken = 4242;
FakeClock clock = new FakeClock();
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(StringUtf8Coder.of()),
makeDoFnInstruction(
new FakeSlowDoFn(clock, Duration.millis(1000)), 0, StringUtf8Coder.of()),
makeSinkInstruction(StringUtf8Coder.of(), 0));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);
options.setActiveWorkRefreshPeriodMillis(100);
StreamingDataflowWorker worker =
makeWorker(
instructions,
options,
false /* publishCounters */,
clock,
clock::newFakeScheduledExecutor);
worker.start();
ActiveWorkRefreshSink awrSink = new ActiveWorkRefreshSink(EMPTY_DATA_RESPONDER);
server.whenGetDataCalled().answerByDefault(awrSink::getData).delayEachResponseBy(Duration.ZERO);
server.whenGetWorkCalled().thenReturn(makeInput(workToken, 0 /* timestamp */));
server.waitForAndGetCommits(1);
worker.stop();
assertEquals(
awrSink.getLatencyAttributionDuration(workToken, State.ACTIVE), Duration.millis(1000));
}
@Test
public void testLatencyAttributionToReadingState() throws Exception {
final int workToken = 5454;
FakeClock clock = new FakeClock();
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(StringUtf8Coder.of()),
makeDoFnInstruction(new ReadingDoFn(), 0, StringUtf8Coder.of()),
makeSinkInstruction(StringUtf8Coder.of(), 0));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);
options.setActiveWorkRefreshPeriodMillis(100);
StreamingDataflowWorker worker =
makeWorker(
instructions,
options,
false /* publishCounters */,
clock,
clock::newFakeScheduledExecutor);
worker.start();
ActiveWorkRefreshSink awrSink =
new ActiveWorkRefreshSink(
(request) -> {
clock.sleep(Duration.millis(1000));
return EMPTY_DATA_RESPONDER.apply(request);
});
server.whenGetDataCalled().answerByDefault(awrSink::getData).delayEachResponseBy(Duration.ZERO);
server.whenGetWorkCalled().thenReturn(makeInput(workToken, 0 /* timestamp */));
server.waitForAndGetCommits(1);
worker.stop();
assertEquals(
awrSink.getLatencyAttributionDuration(workToken, State.READING), Duration.millis(1000));
}
@Test
public void testLatencyAttributionToCommittingState() throws Exception {
final int workToken = 6464;
FakeClock clock = new FakeClock();
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(StringUtf8Coder.of()),
makeSinkInstruction(StringUtf8Coder.of(), 0));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
server
.whenCommitWorkCalled()
.answerByDefault(
(request) -> {
clock.sleep(Duration.millis(1000));
return Windmill.CommitWorkResponse.getDefaultInstance();
});
StreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);
options.setActiveWorkRefreshPeriodMillis(100);
StreamingDataflowWorker worker =
makeWorker(
instructions,
options,
false /* publishCounters */,
clock,
clock::newFakeScheduledExecutor);
worker.start();
ActiveWorkRefreshSink awrSink = new ActiveWorkRefreshSink(EMPTY_DATA_RESPONDER);
server.whenGetDataCalled().answerByDefault(awrSink::getData).delayEachResponseBy(Duration.ZERO);
server.whenGetWorkCalled().thenReturn(makeInput(workToken, TimeUnit.MILLISECONDS.toMicros(0)));
server.waitForAndGetCommits(1);
worker.stop();
assertEquals(
awrSink.getLatencyAttributionDuration(workToken, State.COMMITTING), Duration.millis(1000));
}
@Test
public void testLatencyAttributionPopulatedInCommitRequest() throws Exception {
final int workToken = 7272;
long dofnWaitTimeMs = 1000;
FakeClock clock = new FakeClock();
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(StringUtf8Coder.of()),
makeDoFnInstruction(
new FakeSlowDoFn(clock, Duration.millis(dofnWaitTimeMs)), 0, StringUtf8Coder.of()),
makeSinkInstruction(StringUtf8Coder.of(), 0));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);
options.setActiveWorkRefreshPeriodMillis(100);
options.setNumberOfWorkerHarnessThreads(1);
StreamingDataflowWorker worker =
makeWorker(
instructions,
options,
false /* publishCounters */,
clock,
clock::newFakeScheduledExecutor);
worker.start();
ActiveWorkRefreshSink awrSink = new ActiveWorkRefreshSink(EMPTY_DATA_RESPONDER);
server.whenGetDataCalled().answerByDefault(awrSink::getData).delayEachResponseBy(Duration.ZERO);
server.whenGetWorkCalled().thenReturn(makeInput(workToken, 1 /* timestamp */));
Map<Long, WorkItemCommitRequest> workItemCommitRequest = server.waitForAndGetCommits(1);
worker.stop();
assertEquals(
workItemCommitRequest.get((long) workToken).getPerWorkItemLatencyAttributions(0),
LatencyAttribution.newBuilder()
.setState(State.ACTIVE)
.setTotalDurationMillis(dofnWaitTimeMs)
.build());
if (streamingEngine) {
assertEquals(
workItemCommitRequest.get((long) workToken).getPerWorkItemLatencyAttributions(1),
LatencyAttribution.newBuilder()
.setState(State.GET_WORK_IN_TRANSIT_TO_USER_WORKER)
.setTotalDurationMillis(1000)
.build());
}
}
@Test
public void testLimitOnOutputBundleSize() throws Exception {
List<Integer> finalizeTracker = Lists.newArrayList();
TestCountingSource.setFinalizeTracker(finalizeTracker);
final int numMessagesInCustomSourceShard = 100000;
final int inflatedSizePerMessage = 10000;
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorker worker =
makeWorker(
makeUnboundedSourcePipeline(
numMessagesInCustomSourceShard, new InflateDoFn(inflatedSizePerMessage)),
createTestingPipelineOptions(server),
false /* publishCounters */);
worker.start();
server
.whenGetWorkCalled()
.thenReturn(
buildInput(
"work {"
+ " computation_id: \"computation\""
+ " input_data_watermark: 0"
+ " work {"
+ " key: \"0000000000000001\""
+ " sharding_key: 1"
+ " work_token: 1"
+ " cache_token: 1"
+ " }"
+ "}",
null));
Matcher<Integer> isWithinBundleSizeLimits =
both(greaterThan(StreamingDataflowWorker.MAX_SINK_BYTES * 9 / 10))
.and(lessThan(StreamingDataflowWorker.MAX_SINK_BYTES * 11 / 10));
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(1);
Windmill.WorkItemCommitRequest commit = result.get(1L);
assertThat(commit.getSerializedSize(), isWithinBundleSizeLimits);
server
.whenGetWorkCalled()
.thenReturn(
buildInput(
"work {"
+ " computation_id: \"computation\""
+ " input_data_watermark: 0"
+ " work {"
+ " key: \"0000000000000001\""
+ " sharding_key: 1"
+ " work_token: 2"
+ " cache_token: 1"
+ " }"
+ "}",
null));
result = server.waitForAndGetCommits(1);
commit = result.get(2L);
assertThat(commit.getSerializedSize(), isWithinBundleSizeLimits);
}
@Test
public void testLimitOnOutputBundleSizeWithMultipleSinks() throws Exception {
List<Integer> finalizeTracker = Lists.newArrayList();
TestCountingSource.setFinalizeTracker(finalizeTracker);
final int numMessagesInCustomSourceShard = 100000;
final int inflatedSizePerMessage = 10000;
List<ParallelInstruction> instructions = new ArrayList<>();
instructions.addAll(
makeUnboundedSourcePipeline(
numMessagesInCustomSourceShard, new InflateDoFn(inflatedSizePerMessage)));
instructions.add(
makeSinkInstruction(
DEFAULT_DESTINATION_STREAM_ID + "-1",
StringUtf8Coder.of(),
1,
GlobalWindow.Coder.INSTANCE));
instructions.add(
makeSinkInstruction(
DEFAULT_DESTINATION_STREAM_ID + "-2",
StringUtf8Coder.of(),
1,
GlobalWindow.Coder.INSTANCE));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorker worker =
makeWorker(instructions, createTestingPipelineOptions(server), true /* publishCounters */);
worker.start();
server
.whenGetWorkCalled()
.thenReturn(
buildInput(
"work {"
+ " computation_id: \"computation\""
+ " input_data_watermark: 0"
+ " work {"
+ " key: \"0000000000000001\""
+ " sharding_key: 1"
+ " work_token: 1"
+ " cache_token: 1"
+ " }"
+ "}",
null));
Matcher<Integer> isWithinBundleSizeLimits =
both(greaterThan(StreamingDataflowWorker.MAX_SINK_BYTES * 9 / 10))
.and(lessThan(StreamingDataflowWorker.MAX_SINK_BYTES * 11 / 10));
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(1);
Windmill.WorkItemCommitRequest commit = result.get(1L);
assertThat(commit.getSerializedSize(), isWithinBundleSizeLimits);
server
.whenGetWorkCalled()
.thenReturn(
buildInput(
"work {"
+ " computation_id: \"computation\""
+ " input_data_watermark: 0"
+ " work {"
+ " key: \"0000000000000001\""
+ " sharding_key: 1"
+ " work_token: 2"
+ " cache_token: 1"
+ " }"
+ "}",
null));
result = server.waitForAndGetCommits(1);
commit = result.get(2L);
assertThat(commit.getSerializedSize(), isWithinBundleSizeLimits);
}
@Test
public void testStuckCommit() throws Exception {
if (!streamingEngine) {
return;
}
List<ParallelInstruction> instructions =
Arrays.asList(
makeSourceInstruction(StringUtf8Coder.of()),
makeSinkInstruction(StringUtf8Coder.of(), 0));
FakeWindmillServer server = new FakeWindmillServer(errorCollector);
StreamingDataflowWorkerOptions options = createTestingPipelineOptions(server);
options.setStuckCommitDurationMillis(2000);
StreamingDataflowWorker worker = makeWorker(instructions, options, true /* publishCounters */);
worker.start();
server.setDropStreamingCommits(true);
server
.whenGetWorkCalled()
.thenReturn(makeInput(10, TimeUnit.MILLISECONDS.toMicros(2), DEFAULT_KEY_STRING, 1))
.thenReturn(makeInput(15, TimeUnit.MILLISECONDS.toMicros(3), DEFAULT_KEY_STRING, 5));
ConcurrentHashMap<Long, Consumer<CommitStatus>> droppedCommits =
server.waitForDroppedCommits(2);
server.setDropStreamingCommits(false);
server
.whenGetWorkCalled()
.thenReturn(makeInput(1, TimeUnit.MILLISECONDS.toMicros(1), DEFAULT_KEY_STRING, 1));
Map<Long, Windmill.WorkItemCommitRequest> result = server.waitForAndGetCommits(1);
droppedCommits.values().iterator().next().accept(CommitStatus.OK);
worker.stop();
assertTrue(result.containsKey(1L));
assertEquals(
makeExpectedOutput(
1, TimeUnit.MILLISECONDS.toMicros(1), DEFAULT_KEY_STRING, 1, DEFAULT_KEY_STRING)
.build(),
removeDynamicFields(result.get(1L)));
}
static class BlockingFn extends DoFn<String, String> implements TestRule {
public static CountDownLatch blocker = new CountDownLatch(1);
public static Semaphore counter = new Semaphore(0);
public static AtomicInteger callCounter = new AtomicInteger(0);
@ProcessElement
public void processElement(ProcessContext c) throws InterruptedException {
callCounter.incrementAndGet();
counter.release();
blocker.await();
c.output(c.element());
}
@Override
public Statement apply(final Statement base, final Description description) {
return new Statement() {
@Override
public void evaluate() throws Throwable {
blocker = new CountDownLatch(1);
counter = new Semaphore(0);
callCounter = new AtomicInteger();
base.evaluate();
}
};
}
}
static class KeyTokenInvalidFn extends DoFn<KV<String, String>, KV<String, String>> {
static boolean thrown = false;
@ProcessElement
public void processElement(ProcessContext c) {
if (!thrown) {
thrown = true;
throw new KeyTokenInvalidException("key");
} else {
c.output(c.element());
}
}
}
static class LargeCommitFn extends DoFn<KV<String, String>, KV<String, String>> {
@ProcessElement
public void processElement(ProcessContext c) {
if (c.element().getKey().equals("large_key")) {
StringBuilder s = new StringBuilder();
for (int i = 0; i < 100; ++i) {
s.append("large_commit");
}
c.output(KV.of(c.element().getKey(), s.toString()));
} else {
c.output(c.element());
}
}
}
static class ChangeKeysFn extends DoFn<KV<String, String>, KV<String, String>> {
@ProcessElement
public void processElement(ProcessContext c) {
KV<String, String> elem = c.element();
c.output(KV.of(elem.getKey() + "_" + elem.getValue(), elem.getValue()));
}
}
static class TestExceptionFn extends DoFn<String, String> {
boolean firstTime = true;
@ProcessElement
public void processElement(ProcessContext c) throws Exception {
if (firstTime) {
firstTime = false;
try {
throw new Exception("Exception!");
} catch (Exception e) {
throw new Exception("Another exception!", e);
}
}
}
}
static class PassthroughDoFn
extends DoFn<KV<String, Iterable<String>>, KV<String, Iterable<String>>> {
@ProcessElement
public void processElement(ProcessContext c) {
c.output(c.element());
}
}
static class Action {
GetWorkResponse response;
Timer[] expectedTimers = new Timer[] {};
WatermarkHold[] expectedHolds = new WatermarkHold[] {};
public Action(GetWorkResponse response) {
this.response = response;
}
Action withHolds(WatermarkHold... holds) {
this.expectedHolds = holds;
return this;
}
Action withTimers(Timer... timers) {
this.expectedTimers = timers;
return this;
}
}
static class PrintFn extends DoFn<ValueWithRecordId<KV<Integer, Integer>>, String> {
@ProcessElement
public void processElement(ProcessContext c) {
KV<Integer, Integer> elem = c.element().getValue();
c.output(elem.getKey() + ":" + elem.getValue());
}
}
private static class MockWork {
Work create(long workToken) {
return Work.create(
Windmill.WorkItem.newBuilder().setKey(ByteString.EMPTY).setWorkToken(workToken).build(),
Instant::now,
Collections.emptyList(),
work -> {});
}
}
static class TestExceptionInvalidatesCacheFn
extends DoFn<ValueWithRecordId<KV<Integer, Integer>>, String> {
static boolean thrown = false;
@StateId("int")
private final StateSpec<ValueState<Integer>> counter = StateSpecs.value(VarIntCoder.of());
@ProcessElement
public void processElement(ProcessContext c, @StateId("int") ValueState<Integer> state)
throws Exception {
KV<Integer, Integer> elem = c.element().getValue();
if (elem.getValue() == 0) {
LOG.error("**** COUNTER 0 ****");
assertNull(state.read());
state.write(42);
assertEquals((Integer) 42, state.read());
} else if (elem.getValue() == 1) {
LOG.error("**** COUNTER 1 ****");
assertEquals((Integer) 42, state.read());
} else if (elem.getValue() == 2) {
if (!thrown) {
LOG.error("**** COUNTER 2 (will throw) ****");
thrown = true;
throw new Exception("Exception!");
}
LOG.error("**** COUNTER 2 (retry) ****");
assertEquals((Integer) 42, state.read());
} else {
throw new RuntimeException("only expecting values [0,2]");
}
c.output(elem.getKey() + ":" + elem.getValue());
}
}
private static class FanoutFn extends DoFn<String, String> {
@ProcessElement
public void processElement(ProcessContext c) {
StringBuilder builder = new StringBuilder(1000000);
for (int i = 0; i < 1000000; i++) {
builder.append(' ');
}
String largeString = builder.toString();
for (int i = 0; i < 3000; i++) {
c.output(largeString);
}
}
}
private static class SlowDoFn extends DoFn<String, String> {
@ProcessElement
public void processElement(ProcessContext c) throws Exception {
Thread.sleep(1000);
c.output(c.element());
}
}
static class FakeClock implements Supplier<Instant> {
private final PriorityQueue<Job> jobs = new PriorityQueue<>();
private Instant now = Instant.now();
public ScheduledExecutorService newFakeScheduledExecutor(String unused) {
return new FakeScheduledExecutor();
}
@Override
public synchronized Instant get() {
return now;
}
public synchronized void clear() {
jobs.clear();
}
public synchronized void sleep(Duration duration) {
if (duration.isShorterThan(Duration.ZERO)) {
throw new UnsupportedOperationException("Cannot sleep backwards in time");
}
Instant endOfSleep = now.plus(duration);
while (true) {
Job job = jobs.peek();
if (job == null || job.when.isAfter(endOfSleep)) {
break;
}
jobs.remove();
now = job.when;
job.work.run();
}
now = endOfSleep;
}
private synchronized void schedule(Duration fromNow, Runnable work) {
jobs.add(new Job(now.plus(fromNow), work));
}
private static class Job implements Comparable<Job> {
final Instant when;
final Runnable work;
Job(Instant when, Runnable work) {
this.when = when;
this.work = work;
}
@Override
public int compareTo(Job job) {
return when.compareTo(job.when);
}
}
private class FakeScheduledExecutor implements ScheduledExecutorService {
@Override
public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
return true;
}
@Override
public void execute(Runnable command) {
throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks)
throws InterruptedException {
throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public <T> List<Future<T>> invokeAll(
Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit)
throws InterruptedException {
throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public <T> T invokeAny(Collection<? extends Callable<T>> tasks)
throws ExecutionException, InterruptedException {
throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit)
throws ExecutionException, InterruptedException, TimeoutException {
throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public boolean isShutdown() {
throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public boolean isTerminated() {
throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public void shutdown() {}
@Override
public List<Runnable> shutdownNow() {
throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public <T> Future<T> submit(Callable<T> task) {
throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public Future<?> submit(Runnable task) {
throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public <T> Future<T> submit(Runnable task, T result) {
throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public <V> ScheduledFuture<V> schedule(Callable<V> callable, long delay, TimeUnit unit) {
throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) {
throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public ScheduledFuture<?> scheduleAtFixedRate(
Runnable command, long initialDelay, long period, TimeUnit unit) {
throw new UnsupportedOperationException("Not implemented yet");
}
@Override
public ScheduledFuture<?> scheduleWithFixedDelay(
Runnable command, long initialDelay, long delay, TimeUnit unit) {
if (delay <= 0) {
throw new UnsupportedOperationException(
"Please supply a delay > 0 to scheduleWithFixedDelay");
}
FakeClock.this.schedule(
Duration.millis(unit.toMillis(initialDelay)),
new Runnable() {
@Override
public void run() {
command.run();
FakeClock.this.schedule(Duration.millis(unit.toMillis(delay)), this);
}
});
FakeClock.this.sleep(Duration.ZERO);
return null;
}
}
}
private static class FakeSlowDoFn extends DoFn<String, String> {
private static FakeClock clock;
private final Duration sleep;
FakeSlowDoFn(FakeClock clock, Duration sleep) {
FakeSlowDoFn.clock = clock;
this.sleep = sleep;
}
@ProcessElement
public void processElement(ProcessContext c) throws Exception {
clock.sleep(sleep);
c.output(c.element());
}
}
static class ActiveWorkRefreshSink {
private final Function<GetDataRequest, GetDataResponse> responder;
private final Map<Long, EnumMap<LatencyAttribution.State, Duration>> totalDurations =
new HashMap<>();
ActiveWorkRefreshSink(Function<GetDataRequest, GetDataResponse> responder) {
this.responder = responder;
}
Duration getLatencyAttributionDuration(long workToken, LatencyAttribution.State state) {
EnumMap<LatencyAttribution.State, Duration> durations = totalDurations.get(workToken);
return durations == null ? Duration.ZERO : durations.getOrDefault(state, Duration.ZERO);
}
boolean isActiveWorkRefresh(GetDataRequest request) {
for (ComputationGetDataRequest computationRequest : request.getRequestsList()) {
if (!computationRequest.getComputationId().equals(DEFAULT_COMPUTATION_ID)) {
return false;
}
for (KeyedGetDataRequest keyedRequest : computationRequest.getRequestsList()) {
if (keyedRequest.getWorkToken() == 0
|| keyedRequest.getShardingKey() != DEFAULT_SHARDING_KEY
|| keyedRequest.getValuesToFetchCount() != 0
|| keyedRequest.getBagsToFetchCount() != 0
|| keyedRequest.getTagValuePrefixesToFetchCount() != 0
|| keyedRequest.getWatermarkHoldsToFetchCount() != 0) {
return false;
}
}
}
return true;
}
GetDataResponse getData(GetDataRequest request) {
if (!isActiveWorkRefresh(request)) {
return responder.apply(request);
}
for (ComputationGetDataRequest computationRequest : request.getRequestsList()) {
for (KeyedGetDataRequest keyedRequest : computationRequest.getRequestsList()) {
for (LatencyAttribution la : keyedRequest.getLatencyAttributionList()) {
EnumMap<LatencyAttribution.State, Duration> durations =
totalDurations.computeIfAbsent(
keyedRequest.getWorkToken(),
(Long workToken) ->
new EnumMap<LatencyAttribution.State, Duration>(
LatencyAttribution.State.class));
Duration cur = Duration.millis(la.getTotalDurationMillis());
durations.compute(la.getState(), (s, d) -> d == null || d.isShorterThan(cur) ? cur : d);
}
}
}
return EMPTY_DATA_RESPONDER.apply(request);
}
}
static class ReadingDoFn extends DoFn<String, String> {
@StateId("int")
private final StateSpec<ValueState<Integer>> counter = StateSpecs.value(VarIntCoder.of());
@ProcessElement
public void processElement(ProcessContext c, @StateId("int") ValueState<Integer> state) {
state.read();
c.output(c.element());
}
}
/** For each input element, emits a large string. */
private static class InflateDoFn extends DoFn<ValueWithRecordId<KV<Integer, Integer>>, String> {
final int inflatedSize;
/** For each input element, outputs a string of this length */
InflateDoFn(int inflatedSize) {
this.inflatedSize = inflatedSize;
}
@ProcessElement
public void processElement(ProcessContext c) {
char[] chars = new char[inflatedSize];
Arrays.fill(chars, ' ');
c.output(new String(chars));
}
}
} |
According to [@aloubyansky](https://github.com/quarkusio/quarkus/pull/21890#issuecomment-984754188) it should be like: ```suggestion if (mavenSettings != null) { return Files.exists(Path.of(mavenSettings)) ? mavenSettings : null; } ``` | private static String getMavenSettingsArg() {
final String mavenSettings = System.getProperty("maven.settings");
if (mavenSettings != null && Files.exists(Paths.get(mavenSettings))) {
return mavenSettings;
}
return BootstrapMavenOptions.newInstance().getOptionValue(BootstrapMavenOptions.ALTERNATE_USER_SETTINGS);
} | } | private static String getMavenSettingsArg() {
final String mavenSettings = System.getProperty("maven.settings");
if (mavenSettings != null) {
return Files.exists(Paths.get(mavenSettings)) ? mavenSettings : null;
}
return BootstrapMavenOptions.newInstance().getOptionValue(BootstrapMavenOptions.ALTERNATE_USER_SETTINGS);
} | class WrapperRunner {
public enum Wrapper {
GRADLE("gradlew", "gradlew.bat", new String[] { "--no-daemon", "build", "-i" }),
MAVEN("mvnw", "mvnw.cmd", new String[] { "package" });
private final String execUnix;
private final String execWindows;
private final String[] cmdArgs;
Wrapper(String execUnix, String execWindows, String[] cmdArgs) {
this.execUnix = execUnix;
this.execWindows = execWindows;
this.cmdArgs = cmdArgs;
}
public String getExec() {
return System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows") ? execWindows : execUnix;
}
public String[] getCmdArgs() {
return cmdArgs;
}
public static Wrapper fromBuildtool(String buildtool) {
switch (buildtool) {
case "maven":
return MAVEN;
case "gradle":
case "gradle-kotlin-dsl":
return GRADLE;
default:
throw new IllegalStateException("No wrapper linked to buildtool: " + buildtool);
}
}
public static Wrapper detect(Path projectDir) {
for (Wrapper value : Wrapper.values()) {
final File file = projectDir.resolve(value.getExec()).toFile();
if (file.isFile() && file.canExecute()) {
return value;
}
}
throw new IllegalStateException("No supported wrapper that can be executed found in this directory: " + projectDir);
}
}
public static int run(Path projectDir) {
return run(projectDir, Wrapper.detect(projectDir));
}
public static int run(Path projectDir, Wrapper wrapper) {
List<String> command = new LinkedList<>();
command.add(projectDir.resolve(wrapper.getExec()).toAbsolutePath().toString());
command.addAll(Arrays.asList(wrapper.getCmdArgs()));
propagateSystemPropertyIfSet("maven.repo.local", command);
if (wrapper == Wrapper.MAVEN) {
final String mavenSettings = getMavenSettingsArg();
if (mavenSettings != null) {
command.add("-s");
command.add(mavenSettings);
}
}
try {
System.out.println("Running command: " + command);
final Process p = new ProcessBuilder()
.directory(projectDir.toFile())
.command(command)
.start();
try {
streamToSysOutSysErr(p);
p.waitFor(10, TimeUnit.MINUTES);
return p.exitValue();
} catch (InterruptedException e) {
p.destroyForcibly();
Thread.currentThread().interrupt();
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
return -1;
}
private static void propagateSystemPropertyIfSet(String name, List<String> command) {
if (System.getProperties().containsKey(name)) {
final StringBuilder buf = new StringBuilder();
buf.append("-D").append(name);
final String value = System.getProperty(name);
if (value != null && !value.isEmpty()) {
buf.append("=").append(value);
}
command.add(buf.toString());
}
}
private static void streamToSysOutSysErr(final Process process) {
streamOutputToSysOut(process);
streamErrorToSysErr(process);
}
private static void streamOutputToSysOut(final Process process) {
final InputStream processStdOut = process.getInputStream();
final Thread t = new Thread(new Streamer(processStdOut, System.out));
t.setName("Process stdout streamer");
t.setDaemon(true);
t.start();
}
private static void streamErrorToSysErr(final Process process) {
streamErrorTo(System.err, process);
}
private static void streamErrorTo(final PrintStream printStream, final Process process) {
final InputStream processStdErr = process.getErrorStream();
final Thread t = new Thread(new Streamer(processStdErr, printStream));
t.setName("Process stderr streamer");
t.setDaemon(true);
t.start();
}
private static final class Streamer implements Runnable {
private final InputStream processStream;
private final PrintStream consumer;
private Streamer(final InputStream processStream, final PrintStream consumer) {
this.processStream = processStream;
this.consumer = consumer;
}
@Override
public void run() {
try (final BufferedReader reader = new BufferedReader(
new InputStreamReader(processStream, StandardCharsets.UTF_8))) {
String line = null;
while ((line = reader.readLine()) != null) {
consumer.println(line);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
} | class WrapperRunner {
public enum Wrapper {
GRADLE("gradlew", "gradlew.bat", new String[] { "--no-daemon", "build", "-i" }),
MAVEN("mvnw", "mvnw.cmd", new String[] { "package" });
private final String execUnix;
private final String execWindows;
private final String[] cmdArgs;
Wrapper(String execUnix, String execWindows, String[] cmdArgs) {
this.execUnix = execUnix;
this.execWindows = execWindows;
this.cmdArgs = cmdArgs;
}
public String getExec() {
return System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows") ? execWindows : execUnix;
}
public String[] getCmdArgs() {
return cmdArgs;
}
public static Wrapper fromBuildtool(String buildtool) {
switch (buildtool) {
case "maven":
return MAVEN;
case "gradle":
case "gradle-kotlin-dsl":
return GRADLE;
default:
throw new IllegalStateException("No wrapper linked to buildtool: " + buildtool);
}
}
public static Wrapper detect(Path projectDir) {
for (Wrapper value : Wrapper.values()) {
final File file = projectDir.resolve(value.getExec()).toFile();
if (file.isFile() && file.canExecute()) {
return value;
}
}
throw new IllegalStateException("No supported wrapper that can be executed found in this directory: " + projectDir);
}
}
public static int run(Path projectDir) {
return run(projectDir, Wrapper.detect(projectDir));
}
public static int run(Path projectDir, Wrapper wrapper) {
List<String> command = new LinkedList<>();
command.add(projectDir.resolve(wrapper.getExec()).toAbsolutePath().toString());
command.addAll(Arrays.asList(wrapper.getCmdArgs()));
propagateSystemPropertyIfSet("maven.repo.local", command);
if (wrapper == Wrapper.MAVEN) {
final String mavenSettings = getMavenSettingsArg();
if (mavenSettings != null) {
command.add("-s");
command.add(mavenSettings);
}
}
try {
System.out.println("Running command: " + command);
final Process p = new ProcessBuilder()
.directory(projectDir.toFile())
.command(command)
.start();
try {
streamToSysOutSysErr(p);
p.waitFor(10, TimeUnit.MINUTES);
return p.exitValue();
} catch (InterruptedException e) {
p.destroyForcibly();
Thread.currentThread().interrupt();
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
return -1;
}
private static void propagateSystemPropertyIfSet(String name, List<String> command) {
if (System.getProperties().containsKey(name)) {
final StringBuilder buf = new StringBuilder();
buf.append("-D").append(name);
final String value = System.getProperty(name);
if (value != null && !value.isEmpty()) {
buf.append("=").append(value);
}
command.add(buf.toString());
}
}
private static void streamToSysOutSysErr(final Process process) {
streamOutputToSysOut(process);
streamErrorToSysErr(process);
}
private static void streamOutputToSysOut(final Process process) {
final InputStream processStdOut = process.getInputStream();
final Thread t = new Thread(new Streamer(processStdOut, System.out));
t.setName("Process stdout streamer");
t.setDaemon(true);
t.start();
}
private static void streamErrorToSysErr(final Process process) {
streamErrorTo(System.err, process);
}
private static void streamErrorTo(final PrintStream printStream, final Process process) {
final InputStream processStdErr = process.getErrorStream();
final Thread t = new Thread(new Streamer(processStdErr, printStream));
t.setName("Process stderr streamer");
t.setDaemon(true);
t.start();
}
private static final class Streamer implements Runnable {
private final InputStream processStream;
private final PrintStream consumer;
private Streamer(final InputStream processStream, final PrintStream consumer) {
this.processStream = processStream;
this.consumer = consumer;
}
@Override
public void run() {
try (final BufferedReader reader = new BufferedReader(
new InputStreamReader(processStream, StandardCharsets.UTF_8))) {
String line = null;
while ((line = reader.readLine()) != null) {
consumer.println(line);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
} |
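The review above suggests that when the `maven.settings` system property is set but points to a missing file, the method should return `null` instead of falling through to the alternate user-settings lookup. A minimal standalone sketch of that behavioral difference, assuming a hypothetical `lookupAlternateSettings()` stand-in for the `BootstrapMavenOptions` call:

```java
import java.nio.file.Files;
import java.nio.file.Path;

// Standalone illustration only; lookupAlternateSettings() is a hypothetical stand-in
// for BootstrapMavenOptions.newInstance().getOptionValue(ALTERNATE_USER_SETTINGS).
public class MavenSettingsArgSketch {

    static String lookupAlternateSettings() {
        return "/home/user/.m2/alt-settings.xml"; // placeholder value
    }

    // Original behavior: a set-but-missing file falls through to the alternate lookup.
    static String before(String mavenSettings) {
        if (mavenSettings != null && Files.exists(Path.of(mavenSettings))) {
            return mavenSettings;
        }
        return lookupAlternateSettings();
    }

    // Suggested behavior: a set-but-missing file short-circuits to null.
    static String after(String mavenSettings) {
        if (mavenSettings != null) {
            return Files.exists(Path.of(mavenSettings)) ? mavenSettings : null;
        }
        return lookupAlternateSettings();
    }

    public static void main(String[] args) {
        String missing = "/does/not/exist/settings.xml";
        System.out.println("before: " + before(missing)); // alternate settings path
        System.out.println("after:  " + after(missing));  // null
    }
}
```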
I think it is better to rewrite array_last(lambda, array) ---> element_at(array_filter, -1) | public void analyzeImpl(Analyzer analyzer) throws AnalysisException {
FunctionName fnName = getFnName();
FunctionParams fnParams = getFnParams();
if (!LAMBDA_FUNCTION_SET.contains(fnName.getFunction().toLowerCase())) {
throw new AnalysisException(
"Function {} maybe not in the LAMBDA_FUNCTION_SET, should check the implement" + fnName
.getFunction());
}
int childSize = this.children.size();
Type[] argTypes = new Type[childSize];
for (int i = 0; i < childSize; ++i) {
this.children.get(i).analyze(analyzer);
argTypes[i] = this.children.get(i).getType();
}
if (fnName.getFunction().equalsIgnoreCase("array_map")) {
if (fnParams.exprs() == null || fnParams.exprs().size() < 2) {
throw new AnalysisException("The " + fnName.getFunction() + " function must have at least two params");
}
if (getChild(childSize - 1) instanceof LambdaFunctionExpr) {
Type lastType = argTypes[childSize - 1];
Expr lastChild = getChild(childSize - 1);
for (int i = childSize - 1; i > 0; --i) {
argTypes[i] = getChild(i - 1).getType();
this.setChild(i, getChild(i - 1));
}
argTypes[0] = lastType;
this.setChild(0, lastChild);
}
fn = getBuiltinFunction(fnName.getFunction(), argTypes,
Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
Expr lambda = this.children.get(0);
if (fn == null) {
LOG.warn("fn {} not exists", this.toSqlImpl());
throw new AnalysisException(getFunctionNotFoundError(collectChildReturnTypes()));
}
fn.setReturnType(ArrayType.create(lambda.getChild(0).getType(), true));
} else if (fnName.getFunction().equalsIgnoreCase("array_exists")) {
if (fnParams.exprs() == null || fnParams.exprs().size() < 1) {
throw new AnalysisException("The " + fnName.getFunction() + " function must have at least one param");
}
Type[] newArgTypes = new Type[1];
if (getChild(childSize - 1) instanceof LambdaFunctionExpr) {
List<Expr> params = new ArrayList<>();
for (int i = 0; i <= childSize - 1; ++i) {
params.add(getChild(i));
}
LambdaFunctionCallExpr arrayMapFunc = new LambdaFunctionCallExpr("array_map",
params);
arrayMapFunc.analyzeImpl(analyzer);
Expr castExpr = arrayMapFunc.castTo(ArrayType.create(Type.BOOLEAN, true));
this.clearChildren();
this.addChild(castExpr);
newArgTypes[0] = castExpr.getType();
}
if (!(getChild(0) instanceof CastExpr)) {
Expr castExpr = getChild(0).castTo(ArrayType.create(Type.BOOLEAN, true));
this.setChild(0, castExpr);
newArgTypes[0] = castExpr.getType();
}
fn = getBuiltinFunction(fnName.getFunction(), newArgTypes,
Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
if (fn == null) {
LOG.warn("fn {} not exists", this.toSqlImpl());
throw new AnalysisException(getFunctionNotFoundError(collectChildReturnTypes()));
}
fn.setReturnType(getChild(0).getType());
} else if (fnName.getFunction().equalsIgnoreCase("array_filter")) {
if (fnParams.exprs() == null || fnParams.exprs().size() != 2) {
throw new AnalysisException("The " + fnName.getFunction() + " function must have two params");
}
/*
* array_filter(x->x>3, [1,2,3,6,34,3,11]) --->
* array_filter([1,2,3,6,34,3,11],x->x>3)
* ---> array_filter([1,2,3,6,34,3,11], array_map(x->x>3, [1,2,3,6,34,3,11]))
*/
if (getChild(1) instanceof LambdaFunctionExpr) {
List<Expr> params = new ArrayList<>();
params.add(getChild(1));
params.add(getChild(0));
LambdaFunctionCallExpr arrayMapFunc = new LambdaFunctionCallExpr("array_map",
params);
arrayMapFunc.analyzeImpl(analyzer);
Expr castExpr = arrayMapFunc.castTo(ArrayType.create(Type.BOOLEAN, true));
this.setChild(1, castExpr);
argTypes[1] = castExpr.getType();
}
if (!(getChild(1) instanceof CastExpr)) {
Expr castExpr = getChild(1).castTo(ArrayType.create(Type.BOOLEAN, true));
this.setChild(1, castExpr);
argTypes[1] = castExpr.getType();
}
fn = getBuiltinFunction(fnName.getFunction(), argTypes,
Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
if (fn == null) {
LOG.warn("fn {} not exists", this.toSqlImpl());
throw new AnalysisException(getFunctionNotFoundError(collectChildReturnTypes()));
}
fn.setReturnType(getChild(0).getType());
} else if (fnName.getFunction().equalsIgnoreCase("array_sortby")) {
if (fnParams.exprs() == null || fnParams.exprs().size() < 2) {
throw new AnalysisException("The " + fnName.getFunction() + " function must have at least two params");
}
/*
* array_sortby((x,y)->(x+y), [1,-2,3], [10,11,12]) --->
* array_sortby([1,-2,3],[10,11,12], (x,y)->(x+y))
* ---> array_sortby([1,-2,3], array_map((x,y)->(x+y), [1,-2,3], [10,11,12]))
*/
if (getChild(childSize - 1) instanceof LambdaFunctionExpr) {
List<Expr> params = new ArrayList<>();
for (int i = 0; i <= childSize - 1; ++i) {
params.add(getChild(i));
}
LambdaFunctionCallExpr arrayMapFunc = new LambdaFunctionCallExpr("array_map",
params);
arrayMapFunc.analyzeImpl(analyzer);
Expr firstExpr = getChild(0);
this.clearChildren();
this.addChild(firstExpr);
this.addChild(arrayMapFunc);
argTypes = new Type[2];
argTypes[0] = getChild(0).getType();
argTypes[1] = getChild(1).getType();
}
fn = getBuiltinFunction(fnName.getFunction(), argTypes,
Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
if (fn == null) {
LOG.warn("fn {} not exists", this.toSqlImpl());
throw new AnalysisException(getFunctionNotFoundError(collectChildReturnTypes()));
}
fn.setReturnType(getChild(0).getType());
} else if ( fnName.getFunction().equalsIgnoreCase("array_last") ) {
List<Expr> params = new ArrayList<>();
for (int i = 0; i <= childSize - 1; ++i) {
params.add(getChild(i));
}
LambdaFunctionCallExpr filterFunc = new LambdaFunctionCallExpr("array_filter", params);
IntLiteral indexParam = new IntLiteral(-1, Type.INT);
params = Lists.newArrayList(filterFunc, indexParam) ;
Type[] argTypesForCallExpr = new Type[2];
argTypesForCallExpr[0] = getChild(0).getType();
argTypesForCallExpr[1] = getChild(1).getType();
FunctionCallExpr callExpr = new FunctionCallExpr("element_at", params) ;
callExpr.analyzeImpl(analyzer);
this.setFnName(arrayCloneExpr) ;
this.children.clear() ;
this.children.add(callExpr);
argTypes = new Type[1] ;
argTypes[0] = getChild(0).getType();
fn = getBuiltinFunction(arrayCloneExpr.getFunction(), argTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
if (fn == null) {
LOG.warn("fn element_at not exists");
throw new AnalysisException(getFunctionNotFoundError(collectChildReturnTypes()));
}
fn.setReturnType(getChild(0).getType());
} else if ( fnName.getFunction().equalsIgnoreCase("array_clone_expr") ) {
fn = getBuiltinFunction(fnName.getFunction(), argTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
if (fn == null) {
LOG.warn("fn element_at not exists");
throw new AnalysisException(getFunctionNotFoundError(collectChildReturnTypes()));
}
fn.setReturnType(getChild(0).getType());
}
LOG.info("fn string: " + fn.signatureString() + ". return type: " + fn.getReturnType());
if (fn == null) {
LOG.warn("fn {} not exists", this.toSqlImpl());
throw new AnalysisException(getFunctionNotFoundError(collectChildReturnTypes()));
}
this.type = fn.getReturnType();
} | } else if ( fnName.getFunction().equalsIgnoreCase("array_clone_expr") ) { | public void analyzeImpl(Analyzer analyzer) throws AnalysisException {
FunctionName fnName = getFnName();
FunctionParams fnParams = getFnParams();
if (!LAMBDA_FUNCTION_SET.contains(fnName.getFunction().toLowerCase())) {
throw new AnalysisException(
"Function {} maybe not in the LAMBDA_FUNCTION_SET, should check the implement" + fnName
.getFunction());
}
int childSize = this.children.size();
Type[] argTypes = new Type[childSize];
for (int i = 0; i < childSize; ++i) {
this.children.get(i).analyze(analyzer);
argTypes[i] = this.children.get(i).getType();
}
if (fnName.getFunction().equalsIgnoreCase("array_map")) {
if (fnParams.exprs() == null || fnParams.exprs().size() < 2) {
throw new AnalysisException("The " + fnName.getFunction() + " function must have at least two params");
}
if (getChild(childSize - 1) instanceof LambdaFunctionExpr) {
Type lastType = argTypes[childSize - 1];
Expr lastChild = getChild(childSize - 1);
for (int i = childSize - 1; i > 0; --i) {
argTypes[i] = getChild(i - 1).getType();
this.setChild(i, getChild(i - 1));
}
argTypes[0] = lastType;
this.setChild(0, lastChild);
}
fn = getBuiltinFunction(fnName.getFunction(), argTypes,
Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
Expr lambda = this.children.get(0);
if (fn == null) {
LOG.warn("fn {} not exists", this.toSqlImpl());
throw new AnalysisException(getFunctionNotFoundError(collectChildReturnTypes()));
}
fn.setReturnType(ArrayType.create(lambda.getChild(0).getType(), true));
} else if (fnName.getFunction().equalsIgnoreCase("array_exists")
|| fnName.getFunction().equalsIgnoreCase("array_first_index")) {
if (fnParams.exprs() == null || fnParams.exprs().size() < 1) {
throw new AnalysisException("The " + fnName.getFunction() + " function must have at least one param");
}
Type[] newArgTypes = new Type[1];
if (getChild(childSize - 1) instanceof LambdaFunctionExpr) {
List<Expr> params = new ArrayList<>();
for (int i = 0; i <= childSize - 1; ++i) {
params.add(getChild(i));
}
LambdaFunctionCallExpr arrayMapFunc = new LambdaFunctionCallExpr("array_map",
params);
arrayMapFunc.analyzeImpl(analyzer);
Expr castExpr = arrayMapFunc.castTo(ArrayType.create(Type.BOOLEAN, true));
this.clearChildren();
this.addChild(castExpr);
newArgTypes[0] = castExpr.getType();
}
if (!(getChild(0) instanceof CastExpr)) {
Expr castExpr = getChild(0).castTo(ArrayType.create(Type.BOOLEAN, true));
this.setChild(0, castExpr);
newArgTypes[0] = castExpr.getType();
}
fn = getBuiltinFunction(fnName.getFunction(), newArgTypes,
Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
if (fn == null) {
LOG.warn("fn {} not exists", this.toSqlImpl());
throw new AnalysisException(getFunctionNotFoundError(collectChildReturnTypes()));
}
} else if (fnName.getFunction().equalsIgnoreCase("array_filter")) {
if (fnParams.exprs() == null || fnParams.exprs().size() != 2) {
throw new AnalysisException("The " + fnName.getFunction() + " function must have two params");
}
/*
* array_filter(x->x>3, [1,2,3,6,34,3,11]) --->
* array_filter([1,2,3,6,34,3,11],x->x>3)
* ---> array_filter([1,2,3,6,34,3,11], array_map(x->x>3, [1,2,3,6,34,3,11]))
*/
if (getChild(1) instanceof LambdaFunctionExpr) {
List<Expr> params = new ArrayList<>();
params.add(getChild(1));
params.add(getChild(0));
LambdaFunctionCallExpr arrayMapFunc = new LambdaFunctionCallExpr("array_map",
params);
arrayMapFunc.analyzeImpl(analyzer);
Expr castExpr = arrayMapFunc.castTo(ArrayType.create(Type.BOOLEAN, true));
this.setChild(1, castExpr);
argTypes[1] = castExpr.getType();
}
if (!(getChild(1) instanceof CastExpr)) {
Expr castExpr = getChild(1).castTo(ArrayType.create(Type.BOOLEAN, true));
this.setChild(1, castExpr);
argTypes[1] = castExpr.getType();
}
fn = getBuiltinFunction(fnName.getFunction(), argTypes,
Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
if (fn == null) {
LOG.warn("fn {} not exists", this.toSqlImpl());
throw new AnalysisException(getFunctionNotFoundError(collectChildReturnTypes()));
}
fn.setReturnType(getChild(0).getType());
} else if (fnName.getFunction().equalsIgnoreCase("array_sortby")) {
if (fnParams.exprs() == null || fnParams.exprs().size() < 2) {
throw new AnalysisException("The " + fnName.getFunction() + " function must have at least two params");
}
/*
* array_sortby((x,y)->(x+y), [1,-2,3], [10,11,12]) --->
* array_sortby([1,-2,3],[10,11,12], (x,y)->(x+y))
* ---> array_sortby([1,-2,3], array_map((x,y)->(x+y), [1,-2,3], [10,11,12]))
*/
if (getChild(childSize - 1) instanceof LambdaFunctionExpr) {
List<Expr> params = new ArrayList<>();
for (int i = 0; i <= childSize - 1; ++i) {
params.add(getChild(i));
}
LambdaFunctionCallExpr arrayMapFunc = new LambdaFunctionCallExpr("array_map",
params);
arrayMapFunc.analyzeImpl(analyzer);
Expr firstExpr = getChild(0);
this.clearChildren();
this.addChild(firstExpr);
this.addChild(arrayMapFunc);
argTypes = new Type[2];
argTypes[0] = getChild(0).getType();
argTypes[1] = getChild(1).getType();
}
fn = getBuiltinFunction(fnName.getFunction(), argTypes,
Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
if (fn == null) {
LOG.warn("fn {} not exists", this.toSqlImpl());
throw new AnalysisException(getFunctionNotFoundError(collectChildReturnTypes()));
}
fn.setReturnType(getChild(0).getType());
} else if (fnName.getFunction().equalsIgnoreCase("array_last")) {
if (getChild(childSize - 1) instanceof LambdaFunctionExpr) {
List<Expr> params = new ArrayList<>();
for (int i = 0; i <= childSize - 1; ++i) {
params.add(getChild(i));
}
LambdaFunctionCallExpr arrayFilterFunc = new LambdaFunctionCallExpr("array_filter", params);
arrayFilterFunc.analyzeImpl(analyzer);
IntLiteral indexParam = new IntLiteral(-1, Type.INT);
argTypes = new Type[2];
argTypes[0] = getChild(0).getType();
argTypes[1] = indexParam.getType();
this.children.clear();
this.children.add(arrayFilterFunc);
this.children.add(indexParam);
}
fnName = new FunctionName(null, "element_at");
fn = getBuiltinFunction(fnName.getFunction(), argTypes, Function.CompareMode.IS_NONSTRICT_SUPERTYPE_OF);
if (fn == null) {
LOG.warn("fn element_at not exists");
throw new AnalysisException(getFunctionNotFoundError(collectChildReturnTypes()));
}
fn.setReturnType(((ArrayType) argTypes[0]).getItemType());
}
LOG.info("fn string: " + fn.signatureString() + ". return type: " + fn.getReturnType());
if (fn == null) {
LOG.warn("fn {} not exists", this.toSqlImpl());
throw new AnalysisException(getFunctionNotFoundError(collectChildReturnTypes()));
}
this.type = fn.getReturnType();
} | class LambdaFunctionCallExpr extends FunctionCallExpr {
public static final ImmutableSet<String> LAMBDA_FUNCTION_SET = new ImmutableSortedSet.Builder(
String.CASE_INSENSITIVE_ORDER).add("array_map").add("array_filter").add("array_exists").add("array_sortby").add("array_clone_expr")
.add("array_last")
.build();
public static final ImmutableSet<String> LAMBDA_MAPPED_FUNCTION_SET = new ImmutableSortedSet.Builder(
String.CASE_INSENSITIVE_ORDER).add("array_exists").add("array_sortby").build();
private static final Logger LOG = LogManager.getLogger(LambdaFunctionCallExpr.class);
private static final FunctionName arrayCloneExpr = new FunctionName(null, "array_clone_expr") ;
public LambdaFunctionCallExpr(String functionName, List<Expr> params) {
super(functionName, params);
}
public LambdaFunctionCallExpr(FunctionName functionName, List<Expr> params) {
super(functionName, params);
}
public LambdaFunctionCallExpr(LambdaFunctionCallExpr other) {
super(other);
}
@Override
public Expr clone() {
return new LambdaFunctionCallExpr(this);
}
@Override
protected void toThrift(TExprNode msg) {
FunctionName fnName = getFnName();
if (LAMBDA_MAPPED_FUNCTION_SET.contains(fnName.getFunction().toLowerCase())) {
msg.node_type = TExprNodeType.FUNCTION_CALL;
} else {
msg.node_type = TExprNodeType.LAMBDA_FUNCTION_CALL_EXPR;
}
}
} | class LambdaFunctionCallExpr extends FunctionCallExpr {
public static final ImmutableSet<String> LAMBDA_FUNCTION_SET = new ImmutableSortedSet.Builder(
String.CASE_INSENSITIVE_ORDER).add("array_map").add("array_filter").add("array_exists").add("array_sortby")
.add("array_first_index").add("array_last").build();
public static final ImmutableSet<String> LAMBDA_MAPPED_FUNCTION_SET = new ImmutableSortedSet.Builder(
String.CASE_INSENSITIVE_ORDER).add("array_exists").add("array_sortby")
.add("array_first_index").add("array_last")
.build();
private static final Logger LOG = LogManager.getLogger(LambdaFunctionCallExpr.class);
public LambdaFunctionCallExpr(String functionName, List<Expr> params) {
super(functionName, params);
}
public LambdaFunctionCallExpr(FunctionName functionName, List<Expr> params) {
super(functionName, params);
}
public LambdaFunctionCallExpr(LambdaFunctionCallExpr other) {
super(other);
}
@Override
public Expr clone() {
return new LambdaFunctionCallExpr(this);
}
@Override
protected void toThrift(TExprNode msg) {
FunctionName fnName = getFnName();
if (LAMBDA_MAPPED_FUNCTION_SET.contains(fnName.getFunction().toLowerCase())) {
msg.node_type = TExprNodeType.FUNCTION_CALL;
} else {
msg.node_type = TExprNodeType.LAMBDA_FUNCTION_CALL_EXPR;
}
}
} |
```suggestion keySpecifierNode.fieldNames().forEach(field -> this.addSemanticToken(field, TokenTypes.PROPERTY.getId(), TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(), false, -1, -1)); ``` | public void visit(KeySpecifierNode keySpecifierNode) {
keySpecifierNode.fieldNames().forEach(field -> {
this.addSemanticToken(field, TokenTypes.PROPERTY.getId(), TokenTypeModifiers.DECLARATION.getId() |
TokenTypeModifiers.READONLY.getId(), false, -1, -1);
});
visitSyntaxNode(keySpecifierNode);
} | }); | public void visit(KeySpecifierNode keySpecifierNode) {
keySpecifierNode.fieldNames().forEach(field -> this.addSemanticToken(field, TokenTypes.PROPERTY.getId(),
TokenTypeModifiers.DECLARATION.getId(), false, -1, -1));
visitSyntaxNode(keySpecifierNode);
} | class SemanticTokensVisitor extends NodeVisitor {
private final Set<SemanticToken> semanticTokens;
private final SemanticTokensContext semanticTokensContext;
public SemanticTokensVisitor(SemanticTokensContext semanticTokensContext) {
this.semanticTokens = new TreeSet<>(SemanticToken.semanticTokenComparator);
this.semanticTokensContext = semanticTokensContext;
}
/**
* Collects semantic tokens while traversing the semantic trees and returns the processed list of semantic tokens
* for highlighting.
*
* @param node Root node
* @return {@link SemanticTokens}
*/
public SemanticTokens visitSemanticTokens(Node node) {
List<Integer> data = new ArrayList<>();
visitSyntaxNode(node);
SemanticToken previousToken = null;
for (SemanticToken semanticToken : this.semanticTokens) {
previousToken = semanticToken.processSemanticToken(data, previousToken);
}
return new SemanticTokens(data);
}
public void visit(ImportDeclarationNode importDeclarationNode) {
Optional<ImportPrefixNode> importPrefixNode = importDeclarationNode.prefix();
importPrefixNode.ifPresent(prefixNode -> this.addSemanticToken(prefixNode.prefix(),
TokenTypes.NAMESPACE.getId(), TokenTypeModifiers.DECLARATION.getId(), true,
TokenTypes.NAMESPACE.getId(), 0));
visitSyntaxNode(importDeclarationNode);
}
public void visit(FunctionDefinitionNode functionDefinitionNode) {
LinePosition startLine = functionDefinitionNode.functionName().lineRange().startLine();
SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset());
if (!semanticTokens.contains(semanticToken)) {
int length = functionDefinitionNode.functionName().text().length();
int type = functionDefinitionNode.kind() == SyntaxKind.OBJECT_METHOD_DEFINITION ?
TokenTypes.METHOD.getId() : TokenTypes.FUNCTION.getId();
semanticToken.setProperties(length, type, TokenTypeModifiers.DECLARATION.getId());
semanticTokens.add(semanticToken);
if (functionDefinitionNode.kind() == SyntaxKind.RESOURCE_ACCESSOR_DEFINITION) {
functionDefinitionNode.relativeResourcePath().forEach(resourcePath -> {
SemanticToken resourcePathToken = new SemanticToken(resourcePath.lineRange().startLine().line(),
resourcePath.lineRange().startLine().offset(), resourcePath.textRange().length(), type,
TokenTypeModifiers.DECLARATION.getId());
semanticTokens.add(resourcePathToken);
});
} else {
handleReferences(startLine, length, type, 0);
}
}
visitSyntaxNode(functionDefinitionNode);
}
public void visit(MethodDeclarationNode methodDeclarationNode) {
this.addSemanticToken(methodDeclarationNode.methodName(), TokenTypes.METHOD.getId(),
TokenTypeModifiers.DECLARATION.getId(), false, -1, -1);
visitSyntaxNode(methodDeclarationNode);
}
public void visit(FunctionCallExpressionNode functionCallExpressionNode) {
Node functionName = functionCallExpressionNode.functionName();
if (functionName instanceof QualifiedNameReferenceNode) {
functionName = ((QualifiedNameReferenceNode) functionName).identifier();
}
this.addSemanticToken(functionName, TokenTypes.FUNCTION.getId(), 0, false, -1, -1);
visitSyntaxNode(functionCallExpressionNode);
}
public void visit(MethodCallExpressionNode methodCallExpressionNode) {
this.addSemanticToken(methodCallExpressionNode.methodName(), TokenTypes.METHOD.getId(),
TokenTypeModifiers.DECLARATION.getId(), false, -1, -1);
visitSyntaxNode(methodCallExpressionNode);
}
public void visit(RequiredParameterNode requiredParameterNode) {
boolean isReadonly = isReadonly(requiredParameterNode.typeName());
requiredParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token, TokenTypes.PARAMETER.getId(),
isReadonly ? TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() :
TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.PARAMETER.getId(), isReadonly ?
TokenTypeModifiers.READONLY.getId() : 0));
visitSyntaxNode(requiredParameterNode);
}
public void visit(CaptureBindingPatternNode captureBindingPatternNode) {
boolean readonly = false;
if (captureBindingPatternNode.parent() instanceof TypedBindingPatternNode) {
readonly = this.isReadonly(((TypedBindingPatternNode) captureBindingPatternNode.parent()).typeDescriptor());
}
this.addSemanticToken(captureBindingPatternNode, TokenTypes.VARIABLE.getId(), readonly ?
TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() :
TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.VARIABLE.getId(), readonly ?
TokenTypeModifiers.READONLY.getId() : 0);
visitSyntaxNode(captureBindingPatternNode);
}
public void visit(SimpleNameReferenceNode simpleNameReferenceNode) {
if (!SemanticTokensConstants.SELF.equals(simpleNameReferenceNode.name().text())) {
processSymbols(simpleNameReferenceNode, simpleNameReferenceNode.lineRange().startLine());
}
visitSyntaxNode(simpleNameReferenceNode);
}
public void visit(QualifiedNameReferenceNode qualifiedNameReferenceNode) {
this.addSemanticToken(qualifiedNameReferenceNode.modulePrefix(), TokenTypes.NAMESPACE.getId(), 0, false, -1,
-1);
Token identifier = qualifiedNameReferenceNode.identifier();
processSymbols(identifier, identifier.lineRange().startLine());
visitSyntaxNode(qualifiedNameReferenceNode);
}
public void visit(ConstantDeclarationNode constantDeclarationNode) {
this.addSemanticToken(constantDeclarationNode.variableName(), TokenTypes.VARIABLE.getId(),
TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(), true,
TokenTypes.VARIABLE.getId(), TokenTypeModifiers.READONLY.getId());
visitSyntaxNode(constantDeclarationNode);
}
public void visit(ClassDefinitionNode classDefinitionNode) {
boolean isReadonly = false;
if (!classDefinitionNode.classTypeQualifiers().isEmpty() &&
classDefinitionNode.classTypeQualifiers().stream().anyMatch(qualifier -> {
return qualifier.text().equals(SemanticTokensConstants.READONLY);
})) {
isReadonly = true;
}
this.addSemanticToken(classDefinitionNode.className(), TokenTypes.CLASS.getId(),
isReadonly ? TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() :
TokenTypeModifiers.DECLARATION.getId(), true,
TokenTypes.CLASS.getId(), isReadonly ? TokenTypeModifiers.READONLY.getId() : 0);
visitSyntaxNode(classDefinitionNode);
}
public void visit(ServiceDeclarationNode serviceDeclarationNode) {
serviceDeclarationNode.absoluteResourcePath().forEach(serviceName -> {
LinePosition startLine = serviceName.lineRange().startLine();
SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset(),
serviceName.textRange().length(), TokenTypes.TYPE.getId(),
TokenTypeModifiers.DECLARATION.getId());
semanticTokens.add(semanticToken);
});
visitSyntaxNode(serviceDeclarationNode);
}
public void visit(EnumDeclarationNode enumDeclarationNode) {
this.addSemanticToken(enumDeclarationNode.identifier(), TokenTypes.ENUM.getId(),
TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.ENUM.getId(), 0);
visitSyntaxNode(enumDeclarationNode);
}
public void visit(EnumMemberNode enumMemberNode) {
this.addSemanticToken(enumMemberNode.identifier(), TokenTypes.ENUM_MEMBER.getId(),
TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(), true,
TokenTypes.ENUM_MEMBER.getId(), TokenTypeModifiers.READONLY.getId());
visitSyntaxNode(enumMemberNode);
}
public void visit(AnnotationNode annotationNode) {
this.addSemanticToken(annotationNode.atToken(), TokenTypes.NAMESPACE.getId(), 0, false, -1, -1);
visitSyntaxNode(annotationNode);
}
public void visit(MarkdownParameterDocumentationLineNode markdownParameterDocumentationLineNode) {
int type;
switch (markdownParameterDocumentationLineNode.parent().parent().parent().kind()) {
case RECORD_FIELD:
case OBJECT_FIELD:
type = TokenTypes.PROPERTY.getId();
break;
case TYPE_DEFINITION:
Node node = markdownParameterDocumentationLineNode.parent().parent().parent();
type = TokenTypes.TYPE_PARAMETER.getId();
if (node instanceof TypeDefinitionNode) {
SyntaxKind kind = ((TypeDefinitionNode) node).typeDescriptor().kind();
if (kind == SyntaxKind.OBJECT_TYPE_DESC || kind == SyntaxKind.RECORD_TYPE_DESC) {
type = TokenTypes.PROPERTY.getId();
}
}
break;
default:
type = TokenTypes.PARAMETER.getId();
break;
}
this.addSemanticToken(markdownParameterDocumentationLineNode.parameterName(), type,
TokenTypeModifiers.DOCUMENTATION.getId(), false, -1, -1);
visitSyntaxNode(markdownParameterDocumentationLineNode);
}
public void visit(TypeDefinitionNode typeDefinitionNode) {
int type = TokenTypes.TYPE.getId();
int modifiers = 0;
Node typeDescriptor = typeDefinitionNode.typeDescriptor();
switch (typeDescriptor.kind()) {
case OBJECT_TYPE_DESC:
type = TokenTypes.INTERFACE.getId();
modifiers = TokenTypeModifiers.DECLARATION.getId();
break;
case RECORD_TYPE_DESC:
type = TokenTypes.STRUCT.getId();
modifiers = TokenTypeModifiers.DECLARATION.getId();
break;
case INTERSECTION_TYPE_DESC:
if (typeDescriptor instanceof IntersectionTypeDescriptorNode) {
IntersectionTypeDescriptorNode intSecDescriptor = (IntersectionTypeDescriptorNode) typeDescriptor;
SyntaxKind left = intSecDescriptor.leftTypeDesc().kind();
SyntaxKind right = intSecDescriptor.rightTypeDesc().kind();
if (left == SyntaxKind.RECORD_TYPE_DESC || right == SyntaxKind.RECORD_TYPE_DESC) {
type = TokenTypes.STRUCT.getId();
if (left == SyntaxKind.READONLY_TYPE_DESC || right == SyntaxKind.READONLY_TYPE_DESC) {
modifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId();
} else {
modifiers = TokenTypeModifiers.DECLARATION.getId();
}
}
}
break;
default:
type = TokenTypes.TYPE.getId();
modifiers = TokenTypeModifiers.DECLARATION.getId();
break;
}
this.addSemanticToken(typeDefinitionNode.typeName(), type, modifiers, true, type, 0);
visitSyntaxNode(typeDefinitionNode);
}
public void visit(RecordFieldNode recordFieldNode) {
Token token = recordFieldNode.fieldName();
LinePosition startLine = token.lineRange().startLine();
SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset());
if (!semanticTokens.contains(semanticToken)) {
int length = token.text().trim().length();
int modifiers;
int refModifiers;
if (recordFieldNode.readonlyKeyword().isPresent()) {
modifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId();
refModifiers = TokenTypeModifiers.READONLY.getId();
} else {
modifiers = TokenTypeModifiers.DECLARATION.getId();
refModifiers = 0;
}
semanticToken.setProperties(length, TokenTypes.PROPERTY.getId(), modifiers);
semanticTokens.add(semanticToken);
handleReferences(startLine, length, TokenTypes.PROPERTY.getId(), refModifiers);
}
visitSyntaxNode(recordFieldNode);
}
public void visit(RecordFieldWithDefaultValueNode recordFieldWithDefaultValueNode) {
Token token = recordFieldWithDefaultValueNode.fieldName();
LinePosition startLine = token.lineRange().startLine();
SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset());
if (!semanticTokens.contains(semanticToken)) {
int length = token.text().trim().length();
int modifiers;
int refModifiers;
if (recordFieldWithDefaultValueNode.readonlyKeyword().isPresent()) {
modifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId();
refModifiers = TokenTypeModifiers.READONLY.getId();
} else {
modifiers = TokenTypeModifiers.DECLARATION.getId();
refModifiers = 0;
}
semanticToken.setProperties(length, TokenTypes.PROPERTY.getId(), modifiers);
semanticTokens.add(semanticToken);
handleReferences(startLine, length, TokenTypes.PROPERTY.getId(), refModifiers);
}
visitSyntaxNode(recordFieldWithDefaultValueNode);
}
public void visit(ObjectFieldNode objectFieldNode) {
SyntaxKind kind = objectFieldNode.parent().kind();
int type = kind == SyntaxKind.CLASS_DEFINITION || kind == SyntaxKind.OBJECT_TYPE_DESC ||
kind == SyntaxKind.RECORD_TYPE_DESC || kind == SyntaxKind.OBJECT_CONSTRUCTOR ?
TokenTypes.PROPERTY.getId() : TokenTypes.TYPE_PARAMETER.getId();
this.addSemanticToken(objectFieldNode.fieldName(), type, TokenTypeModifiers.DECLARATION.getId(), true, type,
0);
visitSyntaxNode(objectFieldNode);
}
public void visit(AnnotationDeclarationNode annotationDeclarationNode) {
this.addSemanticToken(annotationDeclarationNode.annotationTag(), TokenTypes.TYPE.getId(),
TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.TYPE.getId(), 0);
visitSyntaxNode(annotationDeclarationNode);
}
public void visit(DefaultableParameterNode defaultableParameterNode) {
defaultableParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token,
TokenTypes.PARAMETER.getId(), TokenTypeModifiers.DECLARATION.getId(), true,
TokenTypes.PARAMETER.getId(), 0));
visitSyntaxNode(defaultableParameterNode);
}
public void visit(IncludedRecordParameterNode includedRecordParameterNode) {
includedRecordParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token,
TokenTypes.PARAMETER.getId(), TokenTypeModifiers.DECLARATION.getId(), true,
TokenTypes.PARAMETER.getId(), 0));
visitSyntaxNode(includedRecordParameterNode);
}
public void visit(RestParameterNode restParameterNode) {
restParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token, TokenTypes.PARAMETER.getId(),
TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.PARAMETER.getId(), 0));
visitSyntaxNode(restParameterNode);
}
/**
* Returns whether the given IntersectionTypeDescriptorNode has a readonly typeDescriptor.
*
* @param node Current node
* @return True if a readonly typeDescriptor is present, false otherwise.
*/
private boolean isReadonly(Node node) {
if (node instanceof IntersectionTypeDescriptorNode) {
IntersectionTypeDescriptorNode intSecDescriptor = (IntersectionTypeDescriptorNode) node;
SyntaxKind left = intSecDescriptor.leftTypeDesc().kind();
SyntaxKind right = intSecDescriptor.rightTypeDesc().kind();
return left == SyntaxKind.READONLY_TYPE_DESC || right == SyntaxKind.READONLY_TYPE_DESC;
}
return false;
}
/**
* Get the symbol of the given node and process the semantic tokens for the symbol and its references.
*
* @param node Current node
* @param startLine Start line position
*/
private void processSymbols(Node node, LinePosition startLine) {
if (!semanticTokens.contains(new SemanticToken(startLine.line(), startLine.offset()))) {
Optional<SemanticModel> semanticModel;
try {
semanticModel = this.semanticTokensContext.currentSemanticModel();
} catch (Throwable e) {
return;
}
if (semanticModel.isEmpty()) {
return;
}
Optional<Symbol> symbol = semanticModel.get().symbol(node);
if (symbol.isPresent() && symbol.get().getLocation().isPresent()) {
LineRange symbolLineRange = symbol.get().getLocation().get().lineRange();
LinePosition linePosition = symbolLineRange.startLine();
SymbolKind kind = symbol.get().kind();
String nodeName = node.toString().trim();
int declarationType = -1, declarationModifiers = -1, referenceType = -1, referenceModifiers = -1;
switch (kind) {
case CLASS:
if (!nodeName.equals(SemanticTokensConstants.SELF)) {
declarationType = TokenTypes.CLASS.getId();
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
referenceType = TokenTypes.CLASS.getId();
}
break;
case CLASS_FIELD:
declarationType = TokenTypes.PROPERTY.getId();
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
referenceType = TokenTypes.PROPERTY.getId();
break;
case CONSTANT:
declarationType = TokenTypes.VARIABLE.getId();
declarationModifiers = TokenTypeModifiers.DECLARATION.getId() |
TokenTypeModifiers.READONLY.getId();
referenceType = TokenTypes.VARIABLE.getId();
referenceModifiers = TokenTypeModifiers.READONLY.getId();
break;
case VARIABLE:
boolean isReadonly = false;
if (symbol.get() instanceof VariableSymbol && ((VariableSymbol) symbol.get()).typeDescriptor()
instanceof IntersectionTypeSymbol && ((IntersectionTypeSymbol) ((VariableSymbol) symbol
.get()).typeDescriptor()).memberTypeDescriptors().stream().anyMatch(desc ->
desc.typeKind() == TypeDescKind.READONLY)) {
isReadonly = true;
}
if (!nodeName.equals(SemanticTokensConstants.SELF)) {
declarationType = TokenTypes.VARIABLE.getId();
declarationModifiers = isReadonly ? TokenTypeModifiers.DECLARATION.getId() |
TokenTypeModifiers.READONLY.getId() : TokenTypeModifiers.DECLARATION.getId();
referenceType = TokenTypes.VARIABLE.getId();
referenceModifiers = isReadonly ? TokenTypeModifiers.READONLY.getId() : 0;
}
break;
case TYPE:
if (symbol.get() instanceof TypeReferenceTypeSymbol) {
TypeSymbol typeDescriptor = ((TypeReferenceTypeSymbol) symbol.get()).typeDescriptor();
int type = TokenTypes.TYPE.getId();
switch (typeDescriptor.kind()) {
case CLASS:
type = TokenTypes.CLASS.getId();
if (typeDescriptor instanceof ClassSymbol &&
((ClassSymbol) typeDescriptor).qualifiers().contains(Qualifier.READONLY)) {
declarationModifiers = TokenTypeModifiers.DECLARATION.getId() |
TokenTypeModifiers.READONLY.getId();
referenceModifiers = TokenTypeModifiers.READONLY.getId();
} else {
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
}
break;
case TYPE:
switch (typeDescriptor.typeKind()) {
case RECORD:
type = TokenTypes.STRUCT.getId();
break;
case OBJECT:
type = TokenTypes.INTERFACE.getId();
break;
case INTERSECTION:
if (typeDescriptor instanceof IntersectionTypeSymbol) {
IntersectionTypeSymbol intSecSymbol =
(IntersectionTypeSymbol) typeDescriptor;
if (intSecSymbol.effectiveTypeDescriptor().typeKind() ==
TypeDescKind.RECORD) {
type = TokenTypes.STRUCT.getId();
if (intSecSymbol.memberTypeDescriptors().stream().anyMatch(desc ->
desc.typeKind() == TypeDescKind.READONLY)) {
declarationModifiers = TokenTypeModifiers.DECLARATION.getId() |
TokenTypeModifiers.READONLY.getId();
referenceModifiers = TokenTypeModifiers.READONLY.getId();
} else {
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
}
}
}
break;
default:
type = TokenTypes.TYPE.getId();
break;
}
break;
default:
type = TokenTypes.TYPE.getId();
break;
}
declarationType = type;
referenceType = type;
} else {
declarationType = TokenTypes.TYPE.getId();
referenceType = TokenTypes.TYPE.getId();
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
}
break;
case RECORD_FIELD:
declarationType = TokenTypes.PROPERTY.getId();
referenceType = TokenTypes.PROPERTY.getId();
if (symbol.get() instanceof RecordFieldSymbol &&
((RecordFieldSymbol) symbol.get()).qualifiers().contains(Qualifier.READONLY)) {
declarationModifiers = TokenTypeModifiers.DECLARATION.getId() |
TokenTypeModifiers.READONLY.getId();
referenceModifiers = TokenTypeModifiers.READONLY.getId();
} else {
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
}
break;
case ENUM_MEMBER:
declarationType = TokenTypes.ENUM_MEMBER.getId();
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
referenceType = TokenTypes.ENUM_MEMBER.getId();
break;
case FUNCTION:
declarationType = TokenTypes.FUNCTION.getId();
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
referenceType = TokenTypes.FUNCTION.getId();
break;
case METHOD:
declarationType = TokenTypes.METHOD.getId();
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
referenceType = TokenTypes.METHOD.getId();
break;
case ANNOTATION:
this.addSemanticToken(node, TokenTypes.TYPE.getId(), 0, false, -1, -1);
break;
default:
break;
}
Path path = this.semanticTokensContext.filePath().getFileName();
if (path == null) {
return;
}
if (declarationType != -1) {
if (symbolLineRange.filePath().equals(path.toString())) {
SemanticToken semanticToken = new SemanticToken(linePosition.line(), linePosition.offset());
if (!semanticTokens.contains(semanticToken)) {
semanticToken.setProperties(node.textRange().length(), declarationType,
declarationModifiers == -1 ? 0 : declarationModifiers);
semanticTokens.add(semanticToken);
}
}
}
if (referenceType != -1) {
final int type = referenceType;
final int modifiers = referenceModifiers == -1 ? 0 : referenceModifiers;
semanticModel.get().references(symbol.get(), false).stream().filter(location ->
location.lineRange().filePath().equals(path.toString())).forEach(location -> {
LinePosition position = location.lineRange().startLine();
SemanticToken semanticToken = new SemanticToken(position.line(), position.offset());
if (!semanticTokens.contains(semanticToken)) {
semanticToken.setProperties(node.textRange().length(), type, modifiers);
semanticTokens.add(semanticToken);
}
});
}
}
}
}
/**
* Adds a semantic token instance into the semanticTokens set for the given node.
*
* @param node Current node
* @param type Semantic token type's index
* @param modifiers Semantic token type modifiers' index
* @param processReferences True if node references should be processed, false otherwise
* @param refType Reference's semantic token type's index
* @param refModifiers Reference's semantic token type modifiers' index
*/
private void addSemanticToken(Node node, int type, int modifiers, boolean processReferences, int refType,
int refModifiers) {
LinePosition startLine = node.lineRange().startLine();
SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset());
if (!semanticTokens.contains(semanticToken)) {
int length = node instanceof Token ? ((Token) node).text().trim().length() : node.textRange().length();
semanticToken.setProperties(length, type, modifiers);
semanticTokens.add(semanticToken);
if (processReferences) {
handleReferences(startLine, length, refType, refModifiers);
}
}
}
/**
* Handles references of the node that is located in the given position.
*
* @param linePosition Start position of the node
* @param length Length to highlight
* @param type Semantic token type's index
* @param modifiers Semantic token type modifiers' index
*/
private void handleReferences(LinePosition linePosition, int length, int type, int modifiers) {
Optional<SemanticModel> semanticModel;
try {
semanticModel = this.semanticTokensContext.currentSemanticModel();
} catch (Throwable e) {
return;
}
if (semanticModel.isEmpty()) {
return;
}
Optional<Document> docOptional = this.semanticTokensContext.currentDocument();
if (docOptional.isEmpty()) {
return;
}
Path path = this.semanticTokensContext.filePath().getFileName();
if (path == null) {
return;
}
semanticModel.get().references(docOptional.get(), linePosition).stream().filter(location ->
location.lineRange().filePath().equals(path.toString())).forEach(location -> {
LinePosition position = location.lineRange().startLine();
SemanticToken semanticToken = new SemanticToken(position.line(), position.offset());
if (!semanticTokens.contains(semanticToken)) {
semanticToken.setProperties(length, type, modifiers);
semanticTokens.add(semanticToken);
}
});
}
/**
* Represents semantic token data for a node.
*/
static class SemanticToken implements Comparable<SemanticToken> {
private final int line;
private final int column;
private int length;
private int type;
private int modifiers;
private SemanticToken(int line, int column) {
this.line = line;
this.column = column;
}
private SemanticToken(int line, int column, int length, int type, int modifiers) {
this.line = line;
this.column = column;
this.length = length;
this.type = type;
this.modifiers = modifiers;
}
private int getLine() {
return line;
}
private int getColumn() {
return column;
}
private int getLength() {
return length;
}
private int getType() {
return type;
}
private int getModifiers() {
return modifiers;
}
public void setProperties(int length, int type, int modifiers) {
this.length = length;
this.type = type;
this.modifiers = modifiers;
}
public SemanticToken processSemanticToken(List<Integer> data, SemanticToken previousToken) {
int line = this.getLine();
int column = this.getColumn();
int prevTokenLine = line;
int prevTokenColumn = column;
if (previousToken != null) {
if (line == previousToken.getLine()) {
column -= previousToken.getColumn();
}
line -= previousToken.getLine();
}
data.add(line);
data.add(column);
data.add(this.getLength());
data.add(this.getType());
data.add(this.getModifiers());
return new SemanticToken(prevTokenLine, prevTokenColumn);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
SemanticToken semanticToken = (SemanticToken) obj;
return line == semanticToken.line && column == semanticToken.column;
}
@Override
public int hashCode() {
return Objects.hash(line, column);
}
@Override
public int compareTo(SemanticToken semanticToken) {
if (this.line == semanticToken.line) {
return this.column - semanticToken.column;
}
return this.line - semanticToken.line;
}
public static Comparator<SemanticToken> semanticTokenComparator = SemanticToken::compareTo;
}
} | class SemanticTokensVisitor extends NodeVisitor {
private final Set<SemanticToken> semanticTokens;
private final SemanticTokensContext semanticTokensContext;
public SemanticTokensVisitor(SemanticTokensContext semanticTokensContext) {
this.semanticTokens = new TreeSet<>(SemanticToken.semanticTokenComparator);
this.semanticTokensContext = semanticTokensContext;
}
/**
* Collects semantic tokens while traversing the semantic trees and returns the processed list of semantic tokens
* for highlighting.
*
* @param node Root node
* @return {@link SemanticTokens}
*/
public SemanticTokens getSemanticTokens(Node node) {
List<Integer> data = new ArrayList<>();
visitSyntaxNode(node);
SemanticToken previousToken = null;
for (SemanticToken semanticToken : this.semanticTokens) {
previousToken = semanticToken.processSemanticToken(data, previousToken);
}
return new SemanticTokens(data);
}
public void visit(ImportDeclarationNode importDeclarationNode) {
Optional<ImportPrefixNode> importPrefixNode = importDeclarationNode.prefix();
importPrefixNode.ifPresent(prefixNode -> this.addSemanticToken(prefixNode.prefix(),
TokenTypes.NAMESPACE.getId(), TokenTypeModifiers.DECLARATION.getId(), true,
TokenTypes.NAMESPACE.getId(), 0));
visitSyntaxNode(importDeclarationNode);
}
public void visit(FunctionDefinitionNode functionDefinitionNode) {
int type = functionDefinitionNode.kind() == SyntaxKind.OBJECT_METHOD_DEFINITION ? TokenTypes.METHOD.getId() :
TokenTypes.FUNCTION.getId();
if (functionDefinitionNode.kind() == SyntaxKind.RESOURCE_ACCESSOR_DEFINITION) {
this.addSemanticToken(functionDefinitionNode.functionName(), type, TokenTypeModifiers.DECLARATION.getId(),
false, -1, -1);
functionDefinitionNode.relativeResourcePath().forEach(resourcePath -> {
this.addSemanticToken(resourcePath, type, TokenTypeModifiers.DECLARATION.getId(), false, -1, -1);
});
} else {
this.addSemanticToken(functionDefinitionNode.functionName(), type, TokenTypeModifiers.DECLARATION.getId(),
true, type, 0);
}
visitSyntaxNode(functionDefinitionNode);
}
public void visit(MethodDeclarationNode methodDeclarationNode) {
this.addSemanticToken(methodDeclarationNode.methodName(), TokenTypes.METHOD.getId(),
TokenTypeModifiers.DECLARATION.getId(), false, -1, -1);
visitSyntaxNode(methodDeclarationNode);
}
public void visit(FunctionCallExpressionNode functionCallExpressionNode) {
Node functionName = functionCallExpressionNode.functionName();
if (functionName instanceof QualifiedNameReferenceNode) {
functionName = ((QualifiedNameReferenceNode) functionName).identifier();
}
this.addSemanticToken(functionName, TokenTypes.FUNCTION.getId(), 0, false, -1, -1);
visitSyntaxNode(functionCallExpressionNode);
}
public void visit(MethodCallExpressionNode methodCallExpressionNode) {
this.addSemanticToken(methodCallExpressionNode.methodName(), TokenTypes.METHOD.getId(),
TokenTypeModifiers.DECLARATION.getId(), false, -1, -1);
visitSyntaxNode(methodCallExpressionNode);
}
public void visit(RequiredParameterNode requiredParameterNode) {
boolean isReadonly = isReadonly(requiredParameterNode.typeName());
requiredParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token, TokenTypes.PARAMETER.getId(),
isReadonly ? TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() :
TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.PARAMETER.getId(), isReadonly ?
TokenTypeModifiers.READONLY.getId() : 0));
visitSyntaxNode(requiredParameterNode);
}
public void visit(CaptureBindingPatternNode captureBindingPatternNode) {
boolean readonly = false;
if (captureBindingPatternNode.parent() instanceof TypedBindingPatternNode) {
readonly = this.isReadonly(((TypedBindingPatternNode) captureBindingPatternNode.parent()).typeDescriptor());
}
this.addSemanticToken(captureBindingPatternNode, TokenTypes.VARIABLE.getId(), readonly ?
TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() :
TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.VARIABLE.getId(), readonly ?
TokenTypeModifiers.READONLY.getId() : 0);
visitSyntaxNode(captureBindingPatternNode);
}
public void visit(SimpleNameReferenceNode simpleNameReferenceNode) {
if (!SemanticTokensConstants.SELF.equals(simpleNameReferenceNode.name().text())) {
processSymbols(simpleNameReferenceNode, simpleNameReferenceNode.lineRange().startLine());
}
visitSyntaxNode(simpleNameReferenceNode);
}
public void visit(QualifiedNameReferenceNode qualifiedNameReferenceNode) {
this.addSemanticToken(qualifiedNameReferenceNode.modulePrefix(), TokenTypes.NAMESPACE.getId(), 0, false, -1,
-1);
Token identifier = qualifiedNameReferenceNode.identifier();
processSymbols(identifier, identifier.lineRange().startLine());
visitSyntaxNode(qualifiedNameReferenceNode);
}
public void visit(ConstantDeclarationNode constantDeclarationNode) {
this.addSemanticToken(constantDeclarationNode.variableName(), TokenTypes.VARIABLE.getId(),
TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(), true,
TokenTypes.VARIABLE.getId(), TokenTypeModifiers.READONLY.getId());
visitSyntaxNode(constantDeclarationNode);
}
public void visit(ClassDefinitionNode classDefinitionNode) {
boolean isReadonly = false;
if (!classDefinitionNode.classTypeQualifiers().isEmpty() &&
classDefinitionNode.classTypeQualifiers().stream().anyMatch(qualifier ->
qualifier.text().equals(SemanticTokensConstants.READONLY))) {
isReadonly = true;
}
this.addSemanticToken(classDefinitionNode.className(), TokenTypes.CLASS.getId(),
isReadonly ? TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId() :
TokenTypeModifiers.DECLARATION.getId(), true,
TokenTypes.CLASS.getId(), isReadonly ? TokenTypeModifiers.READONLY.getId() : 0);
visitSyntaxNode(classDefinitionNode);
}
public void visit(ServiceDeclarationNode serviceDeclarationNode) {
serviceDeclarationNode.absoluteResourcePath().forEach(serviceName -> {
LinePosition startLine = serviceName.lineRange().startLine();
SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset(),
serviceName.textRange().length(), TokenTypes.TYPE.getId(),
TokenTypeModifiers.DECLARATION.getId());
semanticTokens.add(semanticToken);
});
visitSyntaxNode(serviceDeclarationNode);
}
public void visit(EnumDeclarationNode enumDeclarationNode) {
this.addSemanticToken(enumDeclarationNode.identifier(), TokenTypes.ENUM.getId(),
TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.ENUM.getId(), 0);
visitSyntaxNode(enumDeclarationNode);
}
public void visit(EnumMemberNode enumMemberNode) {
this.addSemanticToken(enumMemberNode.identifier(), TokenTypes.ENUM_MEMBER.getId(),
TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId(), true,
TokenTypes.ENUM_MEMBER.getId(), TokenTypeModifiers.READONLY.getId());
visitSyntaxNode(enumMemberNode);
}
public void visit(AnnotationNode annotationNode) {
this.addSemanticToken(annotationNode.atToken(), TokenTypes.NAMESPACE.getId(), 0, false, -1, -1);
visitSyntaxNode(annotationNode);
}
public void visit(MarkdownParameterDocumentationLineNode markdownParameterDocumentationLineNode) {
if (!markdownParameterDocumentationLineNode.parameterName().text().equals(SemanticTokensConstants.RETURN)) {
int type;
switch (markdownParameterDocumentationLineNode.parent().parent().parent().kind()) {
case RECORD_FIELD:
case OBJECT_FIELD:
type = TokenTypes.PROPERTY.getId();
break;
case TYPE_DEFINITION:
Node node = markdownParameterDocumentationLineNode.parent().parent().parent();
type = TokenTypes.TYPE_PARAMETER.getId();
if (node instanceof TypeDefinitionNode) {
SyntaxKind kind = ((TypeDefinitionNode) node).typeDescriptor().kind();
if (kind == SyntaxKind.OBJECT_TYPE_DESC || kind == SyntaxKind.RECORD_TYPE_DESC) {
type = TokenTypes.PROPERTY.getId();
}
}
break;
default:
type = TokenTypes.PARAMETER.getId();
break;
}
this.addSemanticToken(markdownParameterDocumentationLineNode.parameterName(), type,
TokenTypeModifiers.DOCUMENTATION.getId(), false, -1, -1);
}
visitSyntaxNode(markdownParameterDocumentationLineNode);
}
public void visit(TypeDefinitionNode typeDefinitionNode) {
int type = TokenTypes.TYPE.getId();
int modifiers = 0;
Node typeDescriptor = typeDefinitionNode.typeDescriptor();
switch (typeDescriptor.kind()) {
case OBJECT_TYPE_DESC:
type = TokenTypes.INTERFACE.getId();
modifiers = TokenTypeModifiers.DECLARATION.getId();
break;
case RECORD_TYPE_DESC:
type = TokenTypes.STRUCT.getId();
modifiers = TokenTypeModifiers.DECLARATION.getId();
break;
case INTERSECTION_TYPE_DESC:
if (typeDescriptor instanceof IntersectionTypeDescriptorNode) {
IntersectionTypeDescriptorNode intSecDescriptor = (IntersectionTypeDescriptorNode) typeDescriptor;
SyntaxKind left = intSecDescriptor.leftTypeDesc().kind();
SyntaxKind right = intSecDescriptor.rightTypeDesc().kind();
if (left == SyntaxKind.RECORD_TYPE_DESC || right == SyntaxKind.RECORD_TYPE_DESC) {
type = TokenTypes.STRUCT.getId();
}
if (left == SyntaxKind.READONLY_TYPE_DESC || right == SyntaxKind.READONLY_TYPE_DESC) {
modifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId();
} else {
modifiers = TokenTypeModifiers.DECLARATION.getId();
}
}
break;
default:
type = TokenTypes.TYPE.getId();
modifiers = TokenTypeModifiers.DECLARATION.getId();
break;
}
this.addSemanticToken(typeDefinitionNode.typeName(), type, modifiers, true, type, 0);
visitSyntaxNode(typeDefinitionNode);
}
public void visit(RecordFieldNode recordFieldNode) {
Token token = recordFieldNode.fieldName();
LinePosition startLine = token.lineRange().startLine();
SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset());
if (!semanticTokens.contains(semanticToken)) {
int length = token.text().trim().length();
int modifiers;
int refModifiers;
if (recordFieldNode.readonlyKeyword().isPresent()) {
modifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId();
refModifiers = TokenTypeModifiers.READONLY.getId();
} else {
modifiers = TokenTypeModifiers.DECLARATION.getId();
refModifiers = 0;
}
semanticToken.setProperties(length, TokenTypes.PROPERTY.getId(), modifiers);
semanticTokens.add(semanticToken);
handleReferences(startLine, length, TokenTypes.PROPERTY.getId(), refModifiers);
}
visitSyntaxNode(recordFieldNode);
}
public void visit(RecordFieldWithDefaultValueNode recordFieldWithDefaultValueNode) {
Token token = recordFieldWithDefaultValueNode.fieldName();
LinePosition startLine = token.lineRange().startLine();
SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset());
if (!semanticTokens.contains(semanticToken)) {
int length = token.text().trim().length();
int modifiers;
int refModifiers;
if (recordFieldWithDefaultValueNode.readonlyKeyword().isPresent()) {
modifiers = TokenTypeModifiers.DECLARATION.getId() | TokenTypeModifiers.READONLY.getId();
refModifiers = TokenTypeModifiers.READONLY.getId();
} else {
modifiers = TokenTypeModifiers.DECLARATION.getId();
refModifiers = 0;
}
semanticToken.setProperties(length, TokenTypes.PROPERTY.getId(), modifiers);
semanticTokens.add(semanticToken);
handleReferences(startLine, length, TokenTypes.PROPERTY.getId(), refModifiers);
}
visitSyntaxNode(recordFieldWithDefaultValueNode);
}
public void visit(SpecificFieldNode specificFieldNode) {
processSymbols(specificFieldNode.fieldName(),
specificFieldNode.fieldName().location().lineRange().startLine());
visitSyntaxNode(specificFieldNode);
}
public void visit(ObjectFieldNode objectFieldNode) {
SyntaxKind kind = objectFieldNode.parent().kind();
int type = kind == SyntaxKind.CLASS_DEFINITION || kind == SyntaxKind.OBJECT_TYPE_DESC ||
kind == SyntaxKind.RECORD_TYPE_DESC || kind == SyntaxKind.OBJECT_CONSTRUCTOR ?
TokenTypes.PROPERTY.getId() : TokenTypes.TYPE_PARAMETER.getId();
boolean isReadOnly = isReadonly(objectFieldNode.typeName());
this.addSemanticToken(objectFieldNode.fieldName(), type, isReadOnly ? TokenTypeModifiers.DECLARATION.getId() |
TokenTypeModifiers.READONLY.getId() : TokenTypeModifiers.DECLARATION.getId(), true, type,
isReadOnly ? TokenTypeModifiers.READONLY.getId() : 0);
visitSyntaxNode(objectFieldNode);
}
public void visit(AnnotationDeclarationNode annotationDeclarationNode) {
this.addSemanticToken(annotationDeclarationNode.annotationTag(), TokenTypes.TYPE.getId(),
TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.TYPE.getId(), 0);
visitSyntaxNode(annotationDeclarationNode);
}
public void visit(DefaultableParameterNode defaultableParameterNode) {
defaultableParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token,
TokenTypes.PARAMETER.getId(), TokenTypeModifiers.DECLARATION.getId(), true,
TokenTypes.PARAMETER.getId(), 0));
visitSyntaxNode(defaultableParameterNode);
}
public void visit(IncludedRecordParameterNode includedRecordParameterNode) {
includedRecordParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token,
TokenTypes.PARAMETER.getId(), TokenTypeModifiers.DECLARATION.getId(), true,
TokenTypes.PARAMETER.getId(), 0));
visitSyntaxNode(includedRecordParameterNode);
}
public void visit(RestParameterNode restParameterNode) {
restParameterNode.paramName().ifPresent(token -> this.addSemanticToken(token, TokenTypes.PARAMETER.getId(),
TokenTypeModifiers.DECLARATION.getId(), true, TokenTypes.PARAMETER.getId(), 0));
visitSyntaxNode(restParameterNode);
}
public void visit(NamedArgumentNode namedArgumentNode) {
this.addSemanticToken(namedArgumentNode.argumentName(), TokenTypes.VARIABLE.getId(),
TokenTypeModifiers.DECLARATION.getId(), false, -1, -1);
visitSyntaxNode(namedArgumentNode);
}
/**
* Returns whether the given IntersectionTypeDescriptorNode has a readonly typeDescriptor.
*
* @param node Current node
* @return True if a readonly typeDescriptor is present, false otherwise.
*/
private boolean isReadonly(Node node) {
if (node instanceof IntersectionTypeDescriptorNode) {
IntersectionTypeDescriptorNode intSecDescriptor = (IntersectionTypeDescriptorNode) node;
SyntaxKind left = intSecDescriptor.leftTypeDesc().kind();
SyntaxKind right = intSecDescriptor.rightTypeDesc().kind();
return left == SyntaxKind.READONLY_TYPE_DESC || right == SyntaxKind.READONLY_TYPE_DESC;
}
return false;
}
/**
* Get the symbol of the given node and process the semantic tokens for the symbol and its references.
*
* @param node Current node
* @param startLine Start line position
*/
private void processSymbols(Node node, LinePosition startLine) {
if (semanticTokens.contains(new SemanticToken(startLine.line(), startLine.offset()))) {
return;
}
Optional<SemanticModel> semanticModel = this.semanticTokensContext.currentSemanticModel();
if (semanticModel.isEmpty()) {
return;
}
Optional<Symbol> symbol = semanticModel.get().symbol(node);
if (symbol.isEmpty() || symbol.get().getLocation().isEmpty()) {
return;
}
LineRange symbolLineRange = symbol.get().getLocation().get().lineRange();
LinePosition linePosition = symbolLineRange.startLine();
SymbolKind kind = symbol.get().kind();
String nodeName = node.toString().trim();
if (nodeName.equals(SemanticTokensConstants.SELF)) {
return;
}
int declarationType = -1, declarationModifiers = -1, referenceType = -1, referenceModifiers = -1;
switch (kind) {
case CLASS:
declarationType = TokenTypes.CLASS.getId();
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
referenceType = TokenTypes.CLASS.getId();
break;
case CLASS_FIELD:
declarationType = TokenTypes.PROPERTY.getId();
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
referenceType = TokenTypes.PROPERTY.getId();
break;
case CONSTANT:
declarationType = TokenTypes.VARIABLE.getId();
declarationModifiers = TokenTypeModifiers.DECLARATION.getId() |
TokenTypeModifiers.READONLY.getId();
referenceType = TokenTypes.VARIABLE.getId();
referenceModifiers = TokenTypeModifiers.READONLY.getId();
break;
case VARIABLE:
boolean isReadonly = ((VariableSymbol) symbol.get()).typeDescriptor().typeKind() ==
TypeDescKind.INTERSECTION && ((IntersectionTypeSymbol) ((VariableSymbol) symbol.get())
.typeDescriptor()).memberTypeDescriptors().stream()
.anyMatch(desc -> desc.typeKind() == TypeDescKind.READONLY);
declarationType = TokenTypes.VARIABLE.getId();
declarationModifiers = isReadonly ? TokenTypeModifiers.DECLARATION.getId() |
TokenTypeModifiers.READONLY.getId() : TokenTypeModifiers.DECLARATION.getId();
referenceType = TokenTypes.VARIABLE.getId();
referenceModifiers = isReadonly ? TokenTypeModifiers.READONLY.getId() : 0;
break;
case TYPE:
if (symbol.get() instanceof TypeReferenceTypeSymbol) {
TypeSymbol typeDescriptor = ((TypeReferenceTypeSymbol) symbol.get()).typeDescriptor();
int type = TokenTypes.TYPE.getId();
switch (typeDescriptor.kind()) {
case CLASS:
type = TokenTypes.CLASS.getId();
if (typeDescriptor instanceof ClassSymbol &&
((ClassSymbol) typeDescriptor).qualifiers().contains(Qualifier.READONLY)) {
declarationModifiers = TokenTypeModifiers.DECLARATION.getId() |
TokenTypeModifiers.READONLY.getId();
referenceModifiers = TokenTypeModifiers.READONLY.getId();
} else {
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
}
break;
case TYPE:
switch (typeDescriptor.typeKind()) {
case RECORD:
type = TokenTypes.STRUCT.getId();
break;
case OBJECT:
type = TokenTypes.INTERFACE.getId();
break;
case INTERSECTION:
IntersectionTypeSymbol intSecSymbol =
(IntersectionTypeSymbol) typeDescriptor;
if (intSecSymbol.effectiveTypeDescriptor().typeKind() ==
TypeDescKind.RECORD) {
type = TokenTypes.STRUCT.getId();
if (intSecSymbol.memberTypeDescriptors().stream().anyMatch(desc ->
desc.typeKind() == TypeDescKind.READONLY)) {
declarationModifiers = TokenTypeModifiers.DECLARATION.getId() |
TokenTypeModifiers.READONLY.getId();
referenceModifiers = TokenTypeModifiers.READONLY.getId();
} else {
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
}
}
break;
case UNION:
if (((TypeReferenceTypeSymbol) symbol.get()).definition().kind() ==
SymbolKind.ENUM) {
type = TokenTypes.ENUM.getId();
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
}
break;
default:
type = TokenTypes.TYPE.getId();
break;
}
break;
default:
type = TokenTypes.TYPE.getId();
break;
}
declarationType = type;
referenceType = type;
} else {
declarationType = TokenTypes.TYPE.getId();
referenceType = TokenTypes.TYPE.getId();
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
}
break;
case RECORD_FIELD:
declarationType = TokenTypes.PROPERTY.getId();
referenceType = TokenTypes.PROPERTY.getId();
if (symbol.get() instanceof RecordFieldSymbol &&
((RecordFieldSymbol) symbol.get()).qualifiers().contains(Qualifier.READONLY)) {
declarationModifiers = TokenTypeModifiers.DECLARATION.getId() |
TokenTypeModifiers.READONLY.getId();
referenceModifiers = TokenTypeModifiers.READONLY.getId();
} else {
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
}
break;
case ENUM_MEMBER:
declarationType = TokenTypes.ENUM_MEMBER.getId();
declarationModifiers = TokenTypeModifiers.DECLARATION.getId() |
TokenTypeModifiers.READONLY.getId();
referenceType = TokenTypes.ENUM_MEMBER.getId();
referenceModifiers = TokenTypeModifiers.READONLY.getId();
break;
case FUNCTION:
declarationType = TokenTypes.FUNCTION.getId();
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
referenceType = TokenTypes.FUNCTION.getId();
break;
case METHOD:
declarationType = TokenTypes.METHOD.getId();
declarationModifiers = TokenTypeModifiers.DECLARATION.getId();
referenceType = TokenTypes.METHOD.getId();
break;
case ANNOTATION:
this.addSemanticToken(node, TokenTypes.TYPE.getId(), 0, false, -1, -1);
break;
default:
break;
}
Optional<String> path =
this.semanticTokensContext.workspace().relativePath(this.semanticTokensContext.filePath());
if (path.isEmpty()) {
return;
}
if (declarationType != -1) {
if (symbolLineRange.filePath().equals(path.get()) && symbol.get().getModule().isPresent() &&
symbol.get().getModule().get().getName().isPresent() && this.semanticTokensContext
.currentModule().isPresent() && symbol.get().getModule().get().getName().get()
.equals(this.semanticTokensContext.currentModule().get().moduleId().moduleName())) {
SemanticToken semanticToken = new SemanticToken(linePosition.line(), linePosition.offset());
if (!semanticTokens.contains(semanticToken)) {
semanticToken.setProperties(node.textRange().length(), declarationType,
declarationModifiers == -1 ? 0 : declarationModifiers);
semanticTokens.add(semanticToken);
}
}
}
if (referenceType != -1) {
final int type = referenceType;
final int modifiers = referenceModifiers == -1 ? 0 : referenceModifiers;
semanticModel.get().references(symbol.get(), false).stream().filter(location ->
location != null && location.lineRange().filePath().equals(path.get()))
.forEach(location -> {
LinePosition position = location.lineRange().startLine();
SemanticToken semanticToken = new SemanticToken(position.line(), position.offset());
if (!semanticTokens.contains(semanticToken)) {
semanticToken.setProperties(node.textRange().length(), type, modifiers);
semanticTokens.add(semanticToken);
}
});
}
}
/**
* Adds a semantic token instance into the semanticTokens set for the given node.
*
* @param node Current node
* @param type Semantic token type's index
* @param modifiers Semantic token type modifiers' index
* @param processReferences True if node references should be processed, false otherwise
* @param refType Reference's semantic token type's index
* @param refModifiers Reference's semantic token type modifiers' index
*/
private void addSemanticToken(Node node, int type, int modifiers, boolean processReferences, int refType,
int refModifiers) {
LinePosition startLine = node.lineRange().startLine();
SemanticToken semanticToken = new SemanticToken(startLine.line(), startLine.offset());
if (!semanticTokens.contains(semanticToken)) {
int length = node instanceof Token ? ((Token) node).text().trim().length() : node.textRange().length();
semanticToken.setProperties(length, type, modifiers);
semanticTokens.add(semanticToken);
if (processReferences) {
handleReferences(startLine, length, refType, refModifiers);
}
}
}
/**
* Handles references of the node that is located in the given position.
*
* @param linePosition Start position of the node
* @param length Length to highlight
* @param type Semantic token type's index
* @param modifiers Semantic token type modifiers' index
*/
private void handleReferences(LinePosition linePosition, int length, int type, int modifiers) {
Optional<SemanticModel> semanticModel = this.semanticTokensContext.currentSemanticModel();
if (semanticModel.isEmpty()) {
return;
}
Optional<Document> docOptional = this.semanticTokensContext.currentDocument();
if (docOptional.isEmpty()) {
return;
}
Optional<String> path =
this.semanticTokensContext.workspace().relativePath(this.semanticTokensContext.filePath());
if (path.isEmpty()) {
return;
}
semanticModel.get().references(docOptional.get(), linePosition).stream().filter(location ->
location.lineRange().filePath().equals(path.get())).forEach(location -> {
LinePosition position = location.lineRange().startLine();
SemanticToken semanticToken = new SemanticToken(position.line(), position.offset());
if (!semanticTokens.contains(semanticToken)) {
semanticToken.setProperties(length, type, modifiers);
semanticTokens.add(semanticToken);
}
});
}
/**
* Represents semantic token data for a node.
*/
static class SemanticToken implements Comparable<SemanticToken> {
private final int line;
private final int column;
private int length;
private int type;
private int modifiers;
private SemanticToken(int line, int column) {
this.line = line;
this.column = column;
}
private SemanticToken(int line, int column, int length, int type, int modifiers) {
this.line = line;
this.column = column;
this.length = length;
this.type = type;
this.modifiers = modifiers;
}
private int getLine() {
return line;
}
private int getColumn() {
return column;
}
private int getLength() {
return length;
}
private int getType() {
return type;
}
private int getModifiers() {
return modifiers;
}
public void setProperties(int length, int type, int modifiers) {
this.length = length;
this.type = type;
this.modifiers = modifiers;
}
public SemanticToken processSemanticToken(List<Integer> data, SemanticToken previousToken) {
int line = this.getLine();
int column = this.getColumn();
int prevTokenLine = line;
int prevTokenColumn = column;
if (previousToken != null) {
if (line == previousToken.getLine()) {
column -= previousToken.getColumn();
}
line -= previousToken.getLine();
}
data.add(line);
data.add(column);
data.add(this.getLength());
data.add(this.getType());
data.add(this.getModifiers());
return new SemanticToken(prevTokenLine, prevTokenColumn);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
SemanticToken semanticToken = (SemanticToken) obj;
return line == semanticToken.line && column == semanticToken.column;
}
@Override
public int hashCode() {
return Objects.hash(line, column);
}
@Override
public int compareTo(SemanticToken semanticToken) {
if (this.line == semanticToken.line) {
return this.column - semanticToken.column;
}
return this.line - semanticToken.line;
}
public static Comparator<SemanticToken> semanticTokenComparator = SemanticToken::compareTo;
}
} |
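A note on the encoding produced above: SemanticToken.processSemanticToken emits each token as five integers (deltaLine, deltaStartChar, length, tokenType, tokenModifiers), with positions made relative to the previous token as the LSP semantic-tokens format expects. The following standalone sketch (illustrative only, not part of the Ballerina sources; all names are made up) reproduces that delta encoding:

```java
import java.util.ArrayList;
import java.util.List;

class LspDeltaEncodingSketch {
    // Each token is {line, column, length, type, modifiers} in absolute coordinates.
    static List<Integer> encode(int[][] tokens) {
        List<Integer> data = new ArrayList<>();
        int prevLine = 0;
        int prevColumn = 0;
        for (int[] t : tokens) {
            // The start column is made relative only when the token stays on the previous token's line.
            int deltaColumn = t[0] == prevLine ? t[1] - prevColumn : t[1];
            data.add(t[0] - prevLine); // deltaLine
            data.add(deltaColumn);     // deltaStartChar
            data.add(t[2]);            // length
            data.add(t[3]);            // tokenType index
            data.add(t[4]);            // tokenModifiers bit set
            prevLine = t[0];
            prevColumn = t[1];
        }
        return data;
    }

    public static void main(String[] args) {
        // Two tokens on line 2 and one on line 5:
        // prints [2, 4, 6, 1, 0, 0, 11, 3, 0, 1, 3, 0, 8, 2, 0]
        System.out.println(encode(new int[][] {{2, 4, 6, 1, 0}, {2, 15, 3, 0, 1}, {5, 0, 8, 2, 0}}));
    }
}
```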
Ok I'll revert to having 2 separate classes and a base class 👍 | public boolean start() throws IOException {
restClient = source.spec.getConnectionConfiguration().createClient();
String query = source.spec.getQuery() != null ? source.spec.getQuery().get() : null;
if (query == null) {
query = "{\"query\": { \"match_all\": {} }}";
}
if ((source.backendVersion >= 5) && source.numSlices != null && source.numSlices > 1) {
String sliceQuery =
String.format("\"slice\": {\"id\": %s,\"max\": %s}", source.sliceId, source.numSlices);
query = query.replaceFirst("\\{", "{" + sliceQuery + ",");
}
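// For illustration (values assumed): with sliceId 0 and numSlices 5, the user query
// {"query": { "match_all": {} }} becomes {"slice": {"id": 0,"max": 5},"query": { "match_all": {} }}.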
String endPoint =
String.format(
"/%s/%s/_search",
source.spec.getConnectionConfiguration().getIndex(),
source.spec.getConnectionConfiguration().getType());
Map<String, String> params = new HashMap<>();
params.put("scroll", source.spec.getScrollKeepalive());
if (source.backendVersion == 2) {
params.put("size", String.valueOf(source.spec.getBatchSize()));
if (source.shardPreference != null) {
params.put("preference", "_shards:" + source.shardPreference);
}
}
HttpEntity queryEntity = new NStringEntity(query, ContentType.APPLICATION_JSON);
Request request = new Request("GET", endPoint);
request.addParameters(params);
request.setEntity(queryEntity);
Response response = restClient.performRequest(request);
JsonNode searchResult = parseResponse(response.getEntity());
updateScrollId(searchResult);
return readNextBatchAndReturnFirstDocument(searchResult);
}
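// Subsequent batches are fetched in advance() through the scroll API (GET /_search/scroll with the
// scroll id captured above); the scroll context is released in close() via DELETE /_search/scroll.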
private void updateScrollId(JsonNode searchResult) {
scrollId = searchResult.path("_scroll_id").asText();
}
@Override
public boolean advance() throws IOException {
if (batchIterator.hasNext()) {
current = batchIterator.next();
return true;
} else {
String requestBody =
String.format(
"{\"scroll\" : \"%s\",\"scroll_id\" : \"%s\"}",
source.spec.getScrollKeepalive(), scrollId);
HttpEntity scrollEntity = new NStringEntity(requestBody, ContentType.APPLICATION_JSON);
Request request = new Request("GET", "/_search/scroll");
request.addParameters(Collections.emptyMap());
request.setEntity(scrollEntity);
Response response = restClient.performRequest(request);
JsonNode searchResult = parseResponse(response.getEntity());
updateScrollId(searchResult);
return readNextBatchAndReturnFirstDocument(searchResult);
}
}
private boolean readNextBatchAndReturnFirstDocument(JsonNode searchResult) {
JsonNode hits = searchResult.path("hits").path("hits");
if (hits.size() == 0) {
current = null;
batchIterator = null;
return false;
}
List<String> batch = new ArrayList<>();
boolean withMetadata = source.spec.isWithMetadata();
for (JsonNode hit : hits) {
if (withMetadata) {
batch.add(hit.toString());
} else {
String document = hit.path("_source").toString();
batch.add(document);
}
}
batchIterator = batch.listIterator();
current = batchIterator.next();
return true;
}
@Override
public String getCurrent() throws NoSuchElementException {
if (current == null) {
throw new NoSuchElementException();
}
return current;
}
@Override
public void close() throws IOException {
String requestBody = String.format("{\"scroll_id\" : [\"%s\"]}", scrollId);
HttpEntity entity = new NStringEntity(requestBody, ContentType.APPLICATION_JSON);
try {
Request request = new Request("DELETE", "/_search/scroll");
request.addParameters(Collections.emptyMap());
request.setEntity(entity);
restClient.performRequest(request);
} finally {
if (restClient != null) {
restClient.close();
}
}
}
@Override
public BoundedSource<String> getCurrentSource() {
return source;
}
}
/**
* A POJO encapsulating a configuration for retry behavior when issuing requests to ES. A retry
* will be attempted until the maxAttempts or maxDuration is exceeded, whichever comes first, for
* 429 TOO_MANY_REQUESTS error.
*/
@AutoValue
public abstract static class RetryConfiguration implements Serializable {
@VisibleForTesting
static final RetryPredicate DEFAULT_RETRY_PREDICATE = new DefaultRetryPredicate();
abstract int getMaxAttempts();
abstract Duration getMaxDuration();
abstract RetryPredicate getRetryPredicate();
abstract Builder builder();
@AutoValue.Builder
abstract static class Builder {
abstract ElasticsearchIO.RetryConfiguration.Builder setMaxAttempts(int maxAttempts);
abstract ElasticsearchIO.RetryConfiguration.Builder setMaxDuration(Duration maxDuration);
abstract ElasticsearchIO.RetryConfiguration.Builder setRetryPredicate(
RetryPredicate retryPredicate);
abstract ElasticsearchIO.RetryConfiguration build();
}
/**
* Creates RetryConfiguration for {@link ElasticsearchIO} with provided maxAttempts,
* maxDuration and exponential backoff based retries.
*
* @param maxAttempts max number of attempts.
* @param maxDuration maximum duration for retries.
* @return {@link RetryConfiguration} object with provided settings.
*/
public static RetryConfiguration create(int maxAttempts, Duration maxDuration) {
checkArgument(maxAttempts > 0, "maxAttempts must be greater than 0");
checkArgument(
maxDuration != null && maxDuration.isLongerThan(Duration.ZERO),
"maxDuration must be greater than 0");
return new AutoValue_ElasticsearchIO_RetryConfiguration.Builder()
.setMaxAttempts(maxAttempts)
.setMaxDuration(maxDuration)
.setRetryPredicate(DEFAULT_RETRY_PREDICATE)
.build();
}
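// Example (illustrative only): RetryConfiguration.create(10, Duration.standardMinutes(3)) retries a
// failed request up to 10 times or for at most 3 minutes, whichever limit is reached first.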
@VisibleForTesting
RetryConfiguration withRetryPredicate(RetryPredicate predicate) {
checkArgument(predicate != null, "predicate must be provided");
return builder().setRetryPredicate(predicate).build();
}
/**
* An interface used to control if we retry the Elasticsearch call when a {@link Response} is
* obtained. If {@link RetryPredicate#test(HttpEntity)} returns true, the transform retries sending
* the requests to the Elasticsearch server if the {@link RetryConfiguration} permits it.
*/
@FunctionalInterface
interface RetryPredicate extends Predicate<HttpEntity>, Serializable {}
/**
* This is the default predicate used to test if a failed ES operation should be retried. A
* retry will be attempted until the maxAttempts or maxDuration is exceeded, whichever comes
* first, for TOO_MANY_REQUESTS(429) error.
*/
@VisibleForTesting
static class DefaultRetryPredicate implements RetryPredicate {
private int errorCode;
DefaultRetryPredicate(int code) {
this.errorCode = code;
}
DefaultRetryPredicate() {
this(429);
}
/** Returns true if the response has the error code for any mutation. */
private static boolean errorCodePresent(HttpEntity responseEntity, int errorCode) {
try {
JsonNode json = parseResponse(responseEntity);
if (json.path("errors").asBoolean()) {
for (JsonNode item : json.path("items")) {
if (item.findValue("status").asInt() == errorCode) {
return true;
}
}
}
} catch (IOException e) {
LOG.warn("Could not extract error codes from responseEntity {}", responseEntity);
}
return false;
}
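/*
 * For orientation, a minimal sketch (values are illustrative) of a Bulk API response body that
 * would make this predicate return true for error code 429: the "errors" flag is set and at
 * least one item carries a matching "status".
 *
 *   {"took": 3, "errors": true, "items": [
 *     {"index": {"_index": "my-index", "_id": "1", "status": 429,
 *                "error": {"type": "es_rejected_execution_exception"}}}
 *   ]}
 */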
@Override
public boolean test(HttpEntity responseEntity) {
return errorCodePresent(responseEntity, errorCode);
}
}
}
/** A {@link PTransform} converting docs to their Bulk API counterparts. */
@AutoValue
public abstract static class DocToBulk
extends PTransform<PCollection<String>, PCollection<String>> {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final int DEFAULT_RETRY_ON_CONFLICT = 5;
static {
SimpleModule module = new SimpleModule();
module.addSerializer(DocumentMetadata.class, new DocumentMetadataSerializer());
OBJECT_MAPPER.registerModule(module);
}
abstract @Nullable ConnectionConfiguration getConnectionConfiguration();
abstract Write.@Nullable FieldValueExtractFn getIdFn();
abstract Write.@Nullable FieldValueExtractFn getIndexFn();
abstract Write.@Nullable FieldValueExtractFn getRoutingFn();
abstract Write.@Nullable FieldValueExtractFn getTypeFn();
abstract Write.@Nullable FieldValueExtractFn getDocVersionFn();
abstract @Nullable String getDocVersionType();
abstract @Nullable String getUpsertScript();
abstract @Nullable Boolean getUsePartialUpdate();
abstract Write.@Nullable BooleanFieldValueExtractFn getIsDeleteFn();
abstract @Nullable Integer getBackendVersion();
abstract Builder builder();
@AutoValue.Builder
abstract static class Builder {
abstract Builder setConnectionConfiguration(ConnectionConfiguration connectionConfiguration);
abstract Builder setIdFn(Write.FieldValueExtractFn idFunction);
abstract Builder setIndexFn(Write.FieldValueExtractFn indexFn);
abstract Builder setRoutingFn(Write.FieldValueExtractFn routingFunction);
abstract Builder setTypeFn(Write.FieldValueExtractFn typeFn);
abstract Builder setDocVersionFn(Write.FieldValueExtractFn docVersionFn);
abstract Builder setDocVersionType(String docVersionType);
abstract Builder setIsDeleteFn(Write.BooleanFieldValueExtractFn isDeleteFn);
abstract Builder setUsePartialUpdate(Boolean usePartialUpdate);
abstract Builder setUpsertScript(String source);
abstract Builder setBackendVersion(Integer assumedBackendVersion);
abstract DocToBulk build();
}
/**
* Provide the Elasticsearch connection configuration object. Only required if
* withBackendVersion was not used i.e. getBackendVersion() returns null.
*
* @param connectionConfiguration the Elasticsearch {@link ConnectionConfiguration} object
* @return the {@link DocToBulk} with connection configuration set
*/
public DocToBulk withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) {
checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null");
return builder().setConnectionConfiguration(connectionConfiguration).build();
}
/**
* Provide a function to extract the id from the document. This id will be used as the document
* id in Elasticsearch. Should the function throw an Exception then the batch will fail and the
* exception propagated.
*
* @param idFn to extract the document ID
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withIdFn(Write.FieldValueExtractFn idFn) {
checkArgument(idFn != null, "idFn must not be null");
return builder().setIdFn(idFn).build();
}
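/*
 * A minimal usage sketch, assuming an existing connectionConfiguration and a hypothetical "id"
 * field in each JSON document:
 *
 *   ElasticsearchIO.write()
 *       .withConnectionConfiguration(connectionConfiguration)
 *       .withIdFn(doc -> doc.path("id").asText());
 */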
/**
* Provide a function to extract the target index from the document allowing for dynamic
* document routing. Should the function throw an Exception then the batch will fail and the
* exception propagated.
*
* @param indexFn to extract the destination index from
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withIndexFn(Write.FieldValueExtractFn indexFn) {
checkArgument(indexFn != null, "indexFn must not be null");
return builder().setIndexFn(indexFn).build();
}
/**
* Provide a function to extract the target routing from the document allowing for dynamic
* document routing. Should the function throw an Exception then the batch will fail and the
* exception propagated.
*
* @param routingFn to extract the destination routing from
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withRoutingFn(Write.FieldValueExtractFn routingFn) {
checkArgument(routingFn != null, "routingFn must not be null");
return builder().setRoutingFn(routingFn).build();
}
/**
* Provide a function to extract the target type from the document allowing for dynamic document
* routing. Should the function throw an Exception then the batch will fail and the exception
* propagated. Users are encouraged to consider carefully if multiple types are a sensible model
* <a
* href="https:
* discussed in this blog</a>.
*
* @param typeFn to extract the destination type from
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withTypeFn(Write.FieldValueExtractFn typeFn) {
checkArgument(typeFn != null, "typeFn must not be null");
return builder().setTypeFn(typeFn).build();
}
/**
* Provide an instruction to control whether partial updates or inserts (default) are issued to
* Elasticsearch.
*
* @param usePartialUpdate set to true to issue partial updates
* @return the {@link DocToBulk} with the partial update control set
*/
public DocToBulk withUsePartialUpdate(boolean usePartialUpdate) {
return builder().setUsePartialUpdate(usePartialUpdate).build();
}
/**
* Whether to use scripted updates and what script to use.
*
* @param source set to the value of the script source, painless lang
* @return the {@link DocToBulk} with the scripted updates set
*/
public DocToBulk withUpsertScript(String source) {
return builder().setUsePartialUpdate(false).setUpsertScript(source).build();
}
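/*
 * A minimal sketch of a scripted update, assuming a numeric "views" field on each document;
 * both the Painless script and the field name are illustrative assumptions.
 *
 *   ElasticsearchIO.write()
 *       .withConnectionConfiguration(connectionConfiguration)
 *       .withIdFn(doc -> doc.path("id").asText())
 *       .withUpsertScript("ctx._source.views += params.views");
 */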
/**
* Provide a function to extract the doc version from the document. This version number will be
* used as the document version in Elasticsearch. Should the function throw an Exception then
* the batch will fail and the exception propagated. Incompatible with update operations and
* should only be used with withUsePartialUpdate(false)
*
* @param docVersionFn to extract the document version
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withDocVersionFn(Write.FieldValueExtractFn docVersionFn) {
checkArgument(docVersionFn != null, "docVersionFn must not be null");
return builder().setDocVersionFn(docVersionFn).build();
}
/**
* Provide a function to extract the target operation either upsert or delete from the document
* fields allowing dynamic bulk operation decision. While using withIsDeleteFn, it should be
* taken care that the document's id extraction is defined using the withIdFn function or else
* IllegalArgumentException is thrown. Should the function throw an Exception then the batch
* will fail and the exception propagated.
*
* @param isDeleteFn function to determine whether the specific document should be deleted
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withIsDeleteFn(Write.BooleanFieldValueExtractFn isDeleteFn) {
checkArgument(isDeleteFn != null, "deleteFn is required");
return builder().setIsDeleteFn(isDeleteFn).build();
}
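/*
 * A minimal sketch of dynamic deletes, assuming each document carries a boolean "is_deleted"
 * flag (an illustrative field name); note that withIdFn is required alongside withIsDeleteFn.
 *
 *   ElasticsearchIO.write()
 *       .withConnectionConfiguration(connectionConfiguration)
 *       .withIdFn(doc -> doc.path("id").asText())
 *       .withIsDeleteFn(doc -> doc.path("is_deleted").asBoolean());
 */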
/**
* Provide a function to extract the doc version from the document. This version number will be
* used as the document version in Elasticsearch. Should the function throw an Exception then
* the batch will fail and the exception propagated. Incompatible with update operations and
* should only be used with withUsePartialUpdate(false)
*
* @param docVersionType the version type to use, one of {@value VERSION_TYPES}
* @return the {@link DocToBulk} with the doc version type set
*/
public DocToBulk withDocVersionType(String docVersionType) {
checkArgument(
VERSION_TYPES.contains(docVersionType),
"docVersionType must be one of " + "%s",
String.join(", ", VERSION_TYPES));
return builder().setDocVersionType(docVersionType).build();
}
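/*
 * A minimal sketch of external versioning, assuming each document exposes a numeric "version"
 * field (an illustrative name) and that "external" is among the supported VERSION_TYPES.
 *
 *   ElasticsearchIO.write()
 *       .withConnectionConfiguration(connectionConfiguration)
 *       .withIdFn(doc -> doc.path("id").asText())
 *       .withDocVersionFn(doc -> doc.path("version").asText())
 *       .withDocVersionType("external");
 */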
/**
* Use to set explicitly which version of Elasticsearch the destination cluster is running.
* Providing this hint means there is no need for setting {@link
* DocToBulk#withConnectionConfiguration}.
*
* @param backendVersion the major version number of the version of Elasticsearch being run in
* the cluster where documents will be indexed.
* @return the {@link DocToBulk} with the Elasticsearch major version number set
*/
public DocToBulk withBackendVersion(int backendVersion) {
checkArgument(
VALID_CLUSTER_VERSIONS.contains(backendVersion),
"Backend version may only be one of %s",
VALID_CLUSTER_VERSIONS);
return builder().setBackendVersion(backendVersion).build();
}
@Override
public PCollection<String> expand(PCollection<String> docs) {
ConnectionConfiguration connectionConfiguration = getConnectionConfiguration();
Integer backendVersion = getBackendVersion();
Write.FieldValueExtractFn idFn = getIdFn();
Write.BooleanFieldValueExtractFn isDeleteFn = getIsDeleteFn();
checkState(
(backendVersion != null || connectionConfiguration != null),
"withBackendVersion() or withConnectionConfiguration() is required");
checkArgument(
isDeleteFn == null || idFn != null,
"Id needs to be specified by withIdFn for delete operation");
return docs.apply(ParDo.of(new DocToBulkFn(this)));
}
private static class DocumentMetadata implements Serializable {
final String index;
final String type;
final String id;
final Integer retryOnConflict;
final String routing;
final Integer backendVersion;
final String version;
final String versionType;
DocumentMetadata(
String index,
String type,
String id,
Integer retryOnConflict,
String routing,
Integer backendVersion,
String version,
String versionType) {
this.index = index;
this.id = id;
this.type = type;
this.retryOnConflict = retryOnConflict;
this.routing = routing;
this.backendVersion = backendVersion;
this.version = version;
this.versionType = versionType;
}
}
private static class DocumentMetadataSerializer extends StdSerializer<DocumentMetadata> {
private DocumentMetadataSerializer() {
super(DocumentMetadata.class);
}
@Override
public void serialize(DocumentMetadata value, JsonGenerator gen, SerializerProvider provider)
throws IOException {
gen.writeStartObject();
if (value.index != null) {
gen.writeStringField("_index", value.index);
}
if (value.type != null) {
gen.writeStringField("_type", value.type);
}
if (value.id != null) {
gen.writeStringField("_id", value.id);
}
if (value.routing != null) {
gen.writeStringField("routing", value.routing);
}
if (value.retryOnConflict != null && value.backendVersion <= 6) {
gen.writeNumberField("_retry_on_conflict", value.retryOnConflict);
}
if (value.retryOnConflict != null && value.backendVersion >= 7) {
gen.writeNumberField("retry_on_conflict", value.retryOnConflict);
}
if (value.version != null) {
gen.writeStringField("version", value.version);
}
if (value.versionType != null) {
gen.writeStringField("version_type", value.versionType);
}
gen.writeEndObject();
}
}
@VisibleForTesting
static String createBulkApiEntity(DocToBulk spec, String document, int backendVersion)
throws IOException {
String documentMetadata = "{}";
boolean isDelete = false;
if (spec.getIndexFn() != null || spec.getTypeFn() != null || spec.getIdFn() != null) {
JsonNode parsedDocument = OBJECT_MAPPER.readTree(document);
documentMetadata = getDocumentMetadata(spec, parsedDocument, backendVersion);
if (spec.getIsDeleteFn() != null) {
isDelete = spec.getIsDeleteFn().apply(parsedDocument);
}
}
if (isDelete) {
return String.format("{ \"delete\" : %s }%n", documentMetadata);
} else {
if (spec.getUsePartialUpdate()) {
return String.format(
"{ \"update\" : %s }%n{ \"doc\" : %s, " + "\"doc_as_upsert\" : true }%n",
documentMetadata, document);
} else if (spec.getUpsertScript() != null) {
return String.format(
"{ \"update\" : %s }%n{ \"script\" : {\"source\": \"%s\", "
+ "\"params\": %s}, \"upsert\" : %s }%n",
documentMetadata, spec.getUpsertScript(), document, document);
} else {
return String.format("{ \"index\" : %s }%n%s%n", documentMetadata, document);
}
}
}
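/*
 * For orientation, a hypothetical input document {"id":"1","name":"ada"} with only withIdFn
 * configured would be serialized (in the plain index case) roughly as these two bulk lines:
 *
 *   { "index" : {"_id":"1"} }
 *   {"id":"1","name":"ada"}
 */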
private static String lowerCaseOrNull(String input) {
return input == null ? null : input.toLowerCase();
}
/**
* Extracts the components that comprise the document address from the document using the {@link
* Write.FieldValueExtractFn} configured. This allows any or all of the index, type and document
* id to be controlled on a per document basis. If none are provided then an empty default of
* {@code {}} is returned. Sanitization of the index is performed, automatically lower-casing
* the value as required by Elasticsearch.
*
* @param parsedDocument the json from which the index, type and id may be extracted
* @return the document address as JSON or the default
* @throws IOException if the document cannot be parsed as JSON
*/
private static String getDocumentMetadata(
DocToBulk spec, JsonNode parsedDocument, int backendVersion) throws IOException {
DocumentMetadata metadata =
new DocumentMetadata(
spec.getIndexFn() != null
? lowerCaseOrNull(spec.getIndexFn().apply(parsedDocument))
: null,
spec.getTypeFn() != null ? spec.getTypeFn().apply(parsedDocument) : null,
spec.getIdFn() != null ? spec.getIdFn().apply(parsedDocument) : null,
(spec.getUsePartialUpdate()
|| (spec.getUpsertScript() != null && !spec.getUpsertScript().isEmpty()))
? DEFAULT_RETRY_ON_CONFLICT
: null,
spec.getRoutingFn() != null ? spec.getRoutingFn().apply(parsedDocument) : null,
backendVersion,
spec.getDocVersionFn() != null ? spec.getDocVersionFn().apply(parsedDocument) : null,
spec.getDocVersionType());
return OBJECT_MAPPER.writeValueAsString(metadata);
}
/** {@link DoFn} for the {@link DocToBulk} transform. */
@VisibleForTesting
static class DocToBulkFn extends DoFn<String, String> {
private final DocToBulk spec;
private int backendVersion;
public DocToBulkFn(DocToBulk spec) {
this.spec = spec;
}
@Setup
public void setup() throws IOException {
ConnectionConfiguration connectionConfiguration = spec.getConnectionConfiguration();
if (spec.getBackendVersion() == null) {
backendVersion = ElasticsearchIO.getBackendVersion(connectionConfiguration);
} else {
backendVersion = spec.getBackendVersion();
}
}
@ProcessElement
public void processElement(ProcessContext c) throws IOException {
c.output(createBulkApiEntity(spec, c.element(), backendVersion));
}
}
}
/**
* A {@link PTransform} convenience wrapper for doing both document to bulk API serialization as
* well as batching those Bulk API entities and writing them to an Elasticsearch cluster. This
* class is effectively a thin proxy for DocToBulk->BulkIO all-in-one for convenience and backward
* compatibility.
*/
@AutoValue
public abstract static class Write extends PTransform<PCollection<String>, PDone> {
public interface FieldValueExtractFn extends SerializableFunction<JsonNode, String> {}
public interface BooleanFieldValueExtractFn extends SerializableFunction<JsonNode, Boolean> {}
public abstract DocToBulk getDocToBulk();
public abstract BulkIO getBulkIO();
abstract Builder writeBuilder();
@AutoValue.Builder
abstract static class Builder {
abstract Builder setDocToBulk(DocToBulk docToBulk);
abstract Builder setBulkIO(BulkIO bulkIO);
abstract Write build();
}
/** Refer to {@link DocToBulk#withIdFn}. */
public Write withIdFn(FieldValueExtractFn idFn) {
return writeBuilder().setDocToBulk(getDocToBulk().withIdFn(idFn)).build();
}
/** Refer to {@link DocToBulk#withIndexFn}. */
public Write withIndexFn(FieldValueExtractFn indexFn) {
return writeBuilder().setDocToBulk(getDocToBulk().withIndexFn(indexFn)).build();
}
/** Refer to {@link DocToBulk#withRoutingFn}. */
public Write withRoutingFn(FieldValueExtractFn routingFn) {
return writeBuilder().setDocToBulk(getDocToBulk().withRoutingFn(routingFn)).build();
}
/** Refer to {@link DocToBulk#withTypeFn}. */
public Write withTypeFn(FieldValueExtractFn typeFn) {
return writeBuilder().setDocToBulk(getDocToBulk().withTypeFn(typeFn)).build();
}
/** Refer to {@link DocToBulk#withDocVersionFn}. */
public Write withDocVersionFn(FieldValueExtractFn docVersionFn) {
return writeBuilder().setDocToBulk(getDocToBulk().withDocVersionFn(docVersionFn)).build();
}
/** Refer to {@link DocToBulk#withDocVersionType}. */
public Write withDocVersionType(String docVersionType) {
return writeBuilder().setDocToBulk(getDocToBulk().withDocVersionType(docVersionType)).build();
}
/** Refer to {@link DocToBulk#withUsePartialUpdate}. */
public Write withUsePartialUpdate(boolean usePartialUpdate) {
return writeBuilder()
.setDocToBulk(getDocToBulk().withUsePartialUpdate(usePartialUpdate))
.build();
}
/** Refer to {@link DocToBulk#withUpsertScript}. */
public Write withUpsertScript(String source) {
return writeBuilder().setDocToBulk(getDocToBulk().withUpsertScript(source)).build();
}
/** Refer to {@link DocToBulk#withBackendVersion}. */
public Write withBackendVersion(int backendVersion) {
return writeBuilder().setDocToBulk(getDocToBulk().withBackendVersion(backendVersion)).build();
}
/** Refer to {@link DocToBulk#withIsDeleteFn}. */
public Write withIsDeleteFn(Write.BooleanFieldValueExtractFn isDeleteFn) {
return writeBuilder().setDocToBulk(getDocToBulk().withIsDeleteFn(isDeleteFn)).build();
}
/** Refer to {@link BulkIO#withConnectionConfiguration}. */
public Write withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) {
checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null");
return writeBuilder()
.setDocToBulk(getDocToBulk().withConnectionConfiguration(connectionConfiguration))
.setBulkIO(getBulkIO().withConnectionConfiguration(connectionConfiguration))
.build();
}
/** Refer to {@link BulkIO#withMaxBatchSize}. */
public Write withMaxBatchSize(long batchSize) {
return writeBuilder().setBulkIO(getBulkIO().withMaxBatchSize(batchSize)).build();
}
/** Refer to {@link BulkIO#withMaxBatchSizeBytes}. */
public Write withMaxBatchSizeBytes(long batchSizeBytes) {
return writeBuilder().setBulkIO(getBulkIO().withMaxBatchSizeBytes(batchSizeBytes)).build();
}
/** Refer to {@link BulkIO#withRetryConfiguration}. */
public Write withRetryConfiguration(RetryConfiguration retryConfiguration) {
return writeBuilder()
.setBulkIO(getBulkIO().withRetryConfiguration(retryConfiguration))
.build();
}
/** Refer to {@link BulkIO#withIgnoreVersionConflicts}. */
public Write withIgnoreVersionConflicts(boolean ignoreVersionConflicts) {
return writeBuilder()
.setBulkIO(getBulkIO().withIgnoreVersionConflicts(ignoreVersionConflicts))
.build();
}
/** Refer to {@link BulkIO#withUseStatefulBatches}. */
public Write withUseStatefulBatches(boolean useStatefulBatches) {
return writeBuilder()
.setBulkIO(getBulkIO().withUseStatefulBatches(useStatefulBatches))
.build();
}
/** Refer to {@link BulkIO#withMaxBufferingDuration}. */
public Write withMaxBufferingDuration(Duration maxBufferingDuration) {
return writeBuilder()
.setBulkIO(getBulkIO().withMaxBufferingDuration(maxBufferingDuration))
.build();
}
/** Refer to {@link BulkIO#withMaxParallelRequestsPerWindow}. */
public Write withMaxParallelRequestsPerWindow(int maxParallelRequestsPerWindow) {
return writeBuilder()
.setBulkIO(getBulkIO().withMaxParallelRequestsPerWindow(maxParallelRequestsPerWindow))
.build();
}
/** Refer to {@link BulkIO#withAllowableResponseErrors}. */
public Write withAllowableResponseErrors(@Nullable Set<String> allowableResponseErrors) {
if (allowableResponseErrors == null) {
allowableResponseErrors = new HashSet<>();
}
return writeBuilder()
.setBulkIO(getBulkIO().withAllowableResponseErrors(allowableResponseErrors))
.build();
}
@Override
public PDone expand(PCollection<String> input) {
input.apply(getDocToBulk()).apply(getBulkIO());
return PDone.in(input.getPipeline());
}
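/*
 * A minimal end-to-end sketch of this convenience wrapper, assuming an existing PCollection of
 * JSON strings and an existing connectionConfiguration:
 *
 *   PCollection<String> jsonDocs = ...;
 *   jsonDocs.apply(
 *       ElasticsearchIO.write()
 *           .withConnectionConfiguration(connectionConfiguration)
 *           .withMaxBatchSize(1000L)
 *           .withRetryConfiguration(
 *               ElasticsearchIO.RetryConfiguration.create(3, Duration.standardMinutes(1))));
 */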
}
/** A {@link PTransform} writing data to Elasticsearch. */
@AutoValue
public abstract static class BulkIO extends PTransform<PCollection<String>, PDone> {
@VisibleForTesting
static final String RETRY_ATTEMPT_LOG = "Error writing to Elasticsearch. Retry attempt[%d]";
@VisibleForTesting
static final String RETRY_FAILED_LOG =
"Error writing to ES after %d attempt(s). No more attempts allowed";
abstract @Nullable ConnectionConfiguration getConnectionConfiguration();
abstract long getMaxBatchSize();
abstract long getMaxBatchSizeBytes();
abstract @Nullable Duration getMaxBufferingDuration();
abstract boolean getUseStatefulBatches();
abstract int getMaxParallelRequestsPerWindow();
abstract @Nullable RetryConfiguration getRetryConfiguration();
abstract @Nullable Set<String> getAllowedResponseErrors();
abstract Builder builder();
@AutoValue.Builder
abstract static class Builder {
abstract Builder setConnectionConfiguration(ConnectionConfiguration connectionConfiguration);
abstract Builder setMaxBatchSize(long maxBatchSize);
abstract Builder setMaxBatchSizeBytes(long maxBatchSizeBytes);
abstract Builder setRetryConfiguration(RetryConfiguration retryConfiguration);
abstract Builder setAllowedResponseErrors(Set<String> allowedResponseErrors);
abstract Builder setMaxBufferingDuration(Duration maxBufferingDuration);
abstract Builder setUseStatefulBatches(boolean useStatefulBatches);
abstract Builder setMaxParallelRequestsPerWindow(int maxParallelRequestsPerWindow);
abstract BulkIO build();
}
/**
* Provide the Elasticsearch connection configuration object.
*
* @param connectionConfiguration the Elasticsearch {@link ConnectionConfiguration} object
* @return the {@link BulkIO} with connection configuration set
*/
public BulkIO withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) {
checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null");
return builder().setConnectionConfiguration(connectionConfiguration).build();
}
/**
* Provide a maximum size in number of documents for the batch; see the bulk API
* (https:
* Default is 1000 docs (like Elasticsearch bulk size advice). See
* https:
* Depending on the execution engine, the size of bundles may vary; this sets the maximum size.
* Change this if you need to have smaller Elasticsearch bulks.
*
* @param batchSize maximum batch size in number of documents
* @return the {@link BulkIO} with connection batch size set
*/
public BulkIO withMaxBatchSize(long batchSize) {
checkArgument(batchSize > 0, "batchSize must be > 0, but was %s", batchSize);
return builder().setMaxBatchSize(batchSize).build();
}
/**
* Provide a maximum size in bytes for the batch; see the bulk API
* (https:
* Default is 5MB (like Elasticsearch bulk size advice). See
* https:
* Depending on the execution engine, the size of bundles may vary; this sets the maximum size.
* Change this if you need to have smaller Elasticsearch bulks.
*
* @param batchSizeBytes maximum batch size in bytes
* @return the {@link BulkIO} with connection batch size in bytes set
*/
public BulkIO withMaxBatchSizeBytes(long batchSizeBytes) {
checkArgument(batchSizeBytes > 0, "batchSizeBytes must be > 0, but was %s", batchSizeBytes);
return builder().setMaxBatchSizeBytes(batchSizeBytes).build();
}
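/*
 * A minimal sketch bounding bulk requests both by document count and by payload size; the
 * concrete limits are illustrative only.
 *
 *   ElasticsearchIO.write()
 *       .withConnectionConfiguration(connectionConfiguration)
 *       .withMaxBatchSize(500L)
 *       .withMaxBatchSizeBytes(2L * 1024L * 1024L);
 */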
/**
* Provides configuration to retry a failed batch call to Elasticsearch. A batch is considered
* as failed if the underlying {@link RestClient} surfaces 429 HTTP status code as error for one
* or more of the items in the {@link Response}. Users should consider that retrying might
* compound the underlying problem which caused the initial failure. Users should also be aware
* that once retrying is exhausted the error is surfaced to the runner which <em>may</em> then
* opt to retry the current bundle in entirety or abort if the max number of retries of the
* runner is completed. Retrying uses an exponential backoff algorithm, with minimum backoff of
* 5 seconds and then surfacing the error once the maximum number of retries or maximum
* configuration duration is exceeded.
*
* <p>Example use:
*
* <pre>{@code
* ElasticsearchIO.write()
* .withRetryConfiguration(ElasticsearchIO.RetryConfiguration.create(10, Duration.standardMinutes(3))
* ...
* }</pre>
*
* @param retryConfiguration the rules which govern the retry behavior
* @return the {@link BulkIO} with retrying configured
*/
public BulkIO withRetryConfiguration(RetryConfiguration retryConfiguration) {
checkArgument(retryConfiguration != null, "retryConfiguration is required");
return builder().setRetryConfiguration(retryConfiguration).build();
}
/**
* Whether or not to suppress version conflict errors in a Bulk API response. This can be useful
* if your use case involves using external version types.
*
* @param ignoreVersionConflicts true to suppress version conflicts, false to surface version
* conflict errors.
* @return the {@link BulkIO} with version conflict handling configured
*/
public BulkIO withIgnoreVersionConflicts(boolean ignoreVersionConflicts) {
Set<String> allowedResponseErrors = getAllowedResponseErrors();
if (allowedResponseErrors == null) {
allowedResponseErrors = new HashSet<>();
}
if (ignoreVersionConflicts) {
allowedResponseErrors.add(VERSION_CONFLICT_ERROR);
}
return builder().setAllowedResponseErrors(allowedResponseErrors).build();
}
/**
* Provide a set of textual error types which can be contained in Bulk API response
* items[].error.type field. Any element in {@code allowableResponseErrorTypes} will suppress
* errors of the same type in Bulk responses.
*
* <p>See also
* https:
*
* @param allowableResponseErrorTypes the set of error types to suppress in Bulk responses
* @return the {@link BulkIO} with allowable response errors set
*/
public BulkIO withAllowableResponseErrors(@Nullable Set<String> allowableResponseErrorTypes) {
if (allowableResponseErrorTypes == null) {
allowableResponseErrorTypes = new HashSet<>();
}
return builder().setAllowedResponseErrors(allowableResponseErrorTypes).build();
}
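/*
 * A minimal sketch of tolerating version conflicts, e.g. when external versioning is combined
 * with retries; this simply adds VERSION_CONFLICT_ERROR to the allowed response errors.
 *
 *   ElasticsearchIO.write()
 *       .withConnectionConfiguration(connectionConfiguration)
 *       .withIgnoreVersionConflicts(true);
 */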
/**
* If using {@link BulkIO#withUseStatefulBatches}, this can be used to set a maximum elapsed
* time before buffered elements are emitted to Elasticsearch as a Bulk API request. If this
* config is not set, Bulk requests will not be issued until {@link BulkIO#getMaxBatchSize()}
* number of documents have been buffered. This may result in higher latency in particular if
* your max batch size is set to a large value and your pipeline input is low volume.
*
* @param maxBufferingDuration the maximum duration to wait before sending any buffered
* documents to Elasticsearch, regardless of maxBatchSize.
* @return the {@link BulkIO} with maximum buffering duration set
*/
public BulkIO withMaxBufferingDuration(Duration maxBufferingDuration) {
LOG.warn(
"Use of withMaxBufferingDuration requires withUseStatefulBatches(true). "
+ "Setting that automatically.");
return builder()
.setUseStatefulBatches(true)
.setMaxBufferingDuration(maxBufferingDuration)
.build();
}
/**
* Whether or not to use Stateful Processing to ensure bulk requests have the desired number of
* entities i.e. as close to the maxBatchSize as possible. By default without this feature
* enabled, Bulk requests will not contain more than maxBatchSize entities, but the lower bound
* of batch size is determined by Beam Runner bundle sizes, which may be as few as 1.
*
* @param useStatefulBatches true enables the use of Stateful Processing to ensure that batches
* are as close to the maxBatchSize as possible.
* @return the {@link BulkIO} with Stateful Processing enabled or disabled
*/
public BulkIO withUseStatefulBatches(boolean useStatefulBatches) {
return builder().setUseStatefulBatches(useStatefulBatches).build();
}
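/*
 * A minimal sketch of stateful batching with a time bound, so that low-volume inputs are still
 * flushed; the one-minute duration is an illustrative choice.
 *
 *   ElasticsearchIO.write()
 *       .withConnectionConfiguration(connectionConfiguration)
 *       .withUseStatefulBatches(true)
 *       .withMaxBufferingDuration(Duration.standardMinutes(1));
 */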
/**
* When using {@link BulkIO#withUseStatefulBatches} Stateful Processing, states and therefore
* batches are maintained per-key-per-window. If data is globally windowed and this
* configuration is set to 1, there will only ever be 1 request in flight. Having only a single
* request in flight can be beneficial for ensuring an Elasticsearch cluster is not overwhelmed
* by parallel requests, but may not work for all use cases. If this number is less than the
* number of maximum workers in your pipeline, the IO work may not be distributed across all
* workers.
*
* @param maxParallelRequestsPerWindow the maximum number of parallel bulk requests for a window
* of data
* @return the {@link BulkIO} with maximum parallel bulk requests per window set
*/
public BulkIO withMaxParallelRequestsPerWindow(int maxParallelRequestsPerWindow) {
checkArgument(
maxParallelRequestsPerWindow > 0, "maxParallelRequestsPerWindow must be a positive integer");
return builder().setMaxParallelRequestsPerWindow(maxParallelRequestsPerWindow).build();
}
@Override
public PDone expand(PCollection<String> input) {
ConnectionConfiguration connectionConfiguration = getConnectionConfiguration();
checkState(connectionConfiguration != null, "withConnectionConfiguration() is required");
if (getUseStatefulBatches()) {
GroupIntoBatches<Integer, String> groupIntoBatches =
GroupIntoBatches.ofSize(getMaxBatchSize());
if (getMaxBufferingDuration() != null) {
groupIntoBatches = groupIntoBatches.withMaxBufferingDuration(getMaxBufferingDuration());
}
input
.apply(ParDo.of(new AssignShardFn<>(getMaxParallelRequestsPerWindow())))
.apply(groupIntoBatches)
.apply(
"Remove key no longer needed",
MapElements.into(TypeDescriptors.iterables(TypeDescriptors.strings()))
.via(KV::getValue))
.apply(ParDo.of(new BulkIOFn(this)));
} else {
input
.apply(
"Make elements iterable",
MapElements.into(TypeDescriptors.iterables(TypeDescriptors.strings()))
.via(Collections::singletonList))
.apply(ParDo.of(new BulkIOFn(this)));
}
return PDone.in(input.getPipeline());
}
/** {@link DoFn} for the {@link BulkIO} transform. */
@VisibleForTesting
static class BulkIOFn extends DoFn<Iterable<String>, Void> {
private static final Duration RETRY_INITIAL_BACKOFF = Duration.standardSeconds(5);
private transient FluentBackoff retryBackoff;
protected BulkIO spec;
private transient RestClient restClient;
protected ArrayList<String> batch;
long currentBatchSizeBytes;
@VisibleForTesting
BulkIOFn(BulkIO bulkSpec) {
this.spec = bulkSpec;
}
@Setup
public void setup() throws IOException {
ConnectionConfiguration connectionConfiguration = spec.getConnectionConfiguration();
restClient = connectionConfiguration.createClient();
retryBackoff =
FluentBackoff.DEFAULT.withMaxRetries(0).withInitialBackoff(RETRY_INITIAL_BACKOFF);
if (spec.getRetryConfiguration() != null) {
retryBackoff =
FluentBackoff.DEFAULT
.withInitialBackoff(RETRY_INITIAL_BACKOFF)
.withMaxRetries(spec.getRetryConfiguration().getMaxAttempts() - 1)
.withMaxCumulativeBackoff(spec.getRetryConfiguration().getMaxDuration());
}
}
@StartBundle
public void startBundle(StartBundleContext context) {
batch = new ArrayList<>();
currentBatchSizeBytes = 0;
}
@FinishBundle
public void finishBundle(FinishBundleContext context)
throws IOException, InterruptedException {
flushBatch();
}
@ProcessElement
public void processElement(@Element @NonNull Iterable<String> bulkApiEntities)
throws Exception {
for (String bulkApiEntity : bulkApiEntities) {
addAndMaybeFlush(bulkApiEntity);
}
}
protected void addAndMaybeFlush(String bulkApiEntity)
throws IOException, InterruptedException {
batch.add(bulkApiEntity);
currentBatchSizeBytes += bulkApiEntity.getBytes(StandardCharsets.UTF_8).length;
if (batch.size() >= spec.getMaxBatchSize()
|| currentBatchSizeBytes >= spec.getMaxBatchSizeBytes()) {
flushBatch();
}
}
private void flushBatch() throws IOException, InterruptedException {
if (batch.isEmpty()) {
return;
}
LOG.info(
"ElasticsearchIO batch size: {}, batch size bytes: {}",
batch.size(),
currentBatchSizeBytes);
StringBuilder bulkRequest = new StringBuilder();
for (String json : batch) {
bulkRequest.append(json);
}
batch.clear();
currentBatchSizeBytes = 0L;
Response response = null;
HttpEntity responseEntity = null;
String endPoint = spec.getConnectionConfiguration().getBulkEndPoint();
HttpEntity requestBody =
new NStringEntity(bulkRequest.toString(), ContentType.APPLICATION_JSON);
try {
Request request = new Request("POST", endPoint);
request.addParameters(Collections.emptyMap());
request.setEntity(requestBody);
response = restClient.performRequest(request);
responseEntity = new BufferedHttpEntity(response.getEntity());
} catch (java.io.IOException ex) {
if (spec.getRetryConfiguration() == null) {
throw ex;
}
LOG.error("Caught ES timeout, retrying", ex);
}
if (spec.getRetryConfiguration() != null
&& (response == null
|| responseEntity == null
|| spec.getRetryConfiguration().getRetryPredicate().test(responseEntity))) {
if (responseEntity != null
&& spec.getRetryConfiguration().getRetryPredicate().test(responseEntity)) {
LOG.warn("ES Cluster is responding with HTTP 429 - TOO_MANY_REQUESTS.");
}
responseEntity = handleRetry("POST", endPoint, Collections.emptyMap(), requestBody);
}
checkForErrors(responseEntity, spec.getAllowedResponseErrors());
}
/** retry request based on retry configuration policy. */
private HttpEntity handleRetry(
String method, String endpoint, Map<String, String> params, HttpEntity requestBody)
throws IOException, InterruptedException {
Response response;
HttpEntity responseEntity;
Sleeper sleeper = Sleeper.DEFAULT;
BackOff backoff = retryBackoff.backoff();
int attempt = 0;
while (BackOffUtils.next(sleeper, backoff)) {
LOG.warn(String.format(RETRY_ATTEMPT_LOG, ++attempt));
try {
Request request = new Request(method, endpoint);
request.addParameters(params);
request.setEntity(requestBody);
response = restClient.performRequest(request);
responseEntity = new BufferedHttpEntity(response.getEntity());
} catch (java.io.IOException ex) {
LOG.error("Caught ES timeout, retrying", ex);
continue;
}
if (!Objects.requireNonNull(spec.getRetryConfiguration())
.getRetryPredicate()
.test(responseEntity)) {
return responseEntity;
} else {
LOG.warn("ES Cluster is responding with HTTP 429 - TOO_MANY_REQUESTS.");
}
}
throw new IOException(String.format(RETRY_FAILED_LOG, attempt));
}
@Teardown
public void closeClient() throws IOException {
if (restClient != null) {
restClient.close();
}
}
}
}
static int getBackendVersion(ConnectionConfiguration connectionConfiguration) {
try (RestClient restClient = connectionConfiguration.createClient()) {
Request request = new Request("GET", "");
Response response = restClient.performRequest(request);
JsonNode jsonNode = parseResponse(response.getEntity());
int backendVersion =
Integer.parseInt(jsonNode.path("version").path("number").asText().substring(0, 1));
checkArgument(
(VALID_CLUSTER_VERSIONS.contains(backendVersion)),
"The Elasticsearch version to connect to is %s.x. "
+ "This version of the ElasticsearchIO is only compatible with "
+ "Elasticsearch v7.x, v6.x, v5.x and v2.x",
backendVersion);
return backendVersion;
} catch (IOException e) {
throw new IllegalArgumentException("Cannot get Elasticsearch version", e);
}
}
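/*
 * For orientation, the cluster info response parsed above looks roughly like the following
 * (values illustrative); only version.number is read, so "7.10.2" yields a backendVersion of 7:
 *
 *   {"name": "node-1", "cluster_name": "my-cluster", "version": {"number": "7.10.2"}}
 */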
} | MapElements.into(TypeDescriptors.iterables(TypeDescriptors.strings())) | public boolean start() throws IOException {
restClient = source.spec.getConnectionConfiguration().createClient();
String query = source.spec.getQuery() != null ? source.spec.getQuery().get() : null;
if (query == null) {
query = "{\"query\": { \"match_all\": {} }}";
}
if ((source.backendVersion >= 5) && source.numSlices != null && source.numSlices > 1) {
String sliceQuery =
String.format("\"slice\": {\"id\": %s,\"max\": %s}", source.sliceId, source.numSlices);
query = query.replaceFirst("\\{", "{" + sliceQuery + ",");
}
String endPoint =
String.format(
"/%s/%s/_search",
source.spec.getConnectionConfiguration().getIndex(),
source.spec.getConnectionConfiguration().getType());
Map<String, String> params = new HashMap<>();
params.put("scroll", source.spec.getScrollKeepalive());
if (source.backendVersion == 2) {
params.put("size", String.valueOf(source.spec.getBatchSize()));
if (source.shardPreference != null) {
params.put("preference", "_shards:" + source.shardPreference);
}
}
HttpEntity queryEntity = new NStringEntity(query, ContentType.APPLICATION_JSON);
Request request = new Request("GET", endPoint);
request.addParameters(params);
request.setEntity(queryEntity);
Response response = restClient.performRequest(request);
JsonNode searchResult = parseResponse(response.getEntity());
updateScrollId(searchResult);
return readNextBatchAndReturnFirstDocument(searchResult);
}
private void updateScrollId(JsonNode searchResult) {
scrollId = searchResult.path("_scroll_id").asText();
}
@Override
public boolean advance() throws IOException {
if (batchIterator.hasNext()) {
current = batchIterator.next();
return true;
} else {
String requestBody =
String.format(
"{\"scroll\" : \"%s\",\"scroll_id\" : \"%s\"}",
source.spec.getScrollKeepalive(), scrollId);
HttpEntity scrollEntity = new NStringEntity(requestBody, ContentType.APPLICATION_JSON);
Request request = new Request("GET", "/_search/scroll");
request.addParameters(Collections.emptyMap());
request.setEntity(scrollEntity);
Response response = restClient.performRequest(request);
JsonNode searchResult = parseResponse(response.getEntity());
updateScrollId(searchResult);
return readNextBatchAndReturnFirstDocument(searchResult);
}
}
private boolean readNextBatchAndReturnFirstDocument(JsonNode searchResult) {
JsonNode hits = searchResult.path("hits").path("hits");
if (hits.size() == 0) {
current = null;
batchIterator = null;
return false;
}
List<String> batch = new ArrayList<>();
boolean withMetadata = source.spec.isWithMetadata();
for (JsonNode hit : hits) {
if (withMetadata) {
batch.add(hit.toString());
} else {
String document = hit.path("_source").toString();
batch.add(document);
}
}
batchIterator = batch.listIterator();
current = batchIterator.next();
return true;
}
@Override
public String getCurrent() throws NoSuchElementException {
if (current == null) {
throw new NoSuchElementException();
}
return current;
}
@Override
public void close() throws IOException {
String requestBody = String.format("{\"scroll_id\" : [\"%s\"]}", scrollId);
HttpEntity entity = new NStringEntity(requestBody, ContentType.APPLICATION_JSON);
try {
Request request = new Request("DELETE", "/_search/scroll");
request.addParameters(Collections.emptyMap());
request.setEntity(entity);
restClient.performRequest(request);
} finally {
if (restClient != null) {
restClient.close();
}
}
}
@Override
public BoundedSource<String> getCurrentSource() {
return source;
}
}
/**
* A POJO encapsulating a configuration for retry behavior when issuing requests to ES. A retry
* will be attempted until the maxAttempts or maxDuration is exceeded, whichever comes first, for
* 429 TOO_MANY_REQUESTS error.
*/
@AutoValue
public abstract static class RetryConfiguration implements Serializable {
@VisibleForTesting
static final RetryPredicate DEFAULT_RETRY_PREDICATE = new DefaultRetryPredicate();
abstract int getMaxAttempts();
abstract Duration getMaxDuration();
abstract RetryPredicate getRetryPredicate();
abstract Builder builder();
@AutoValue.Builder
abstract static class Builder {
abstract ElasticsearchIO.RetryConfiguration.Builder setMaxAttempts(int maxAttempts);
abstract ElasticsearchIO.RetryConfiguration.Builder setMaxDuration(Duration maxDuration);
abstract ElasticsearchIO.RetryConfiguration.Builder setRetryPredicate(
RetryPredicate retryPredicate);
abstract ElasticsearchIO.RetryConfiguration build();
}
/**
* Creates RetryConfiguration for {@link ElasticsearchIO} with provided maxAttempts,
* maxDuration and exponential backoff based retries.
*
* @param maxAttempts max number of attempts.
* @param maxDuration maximum duration for retries.
* @return {@link RetryConfiguration} object with provided settings.
*/
public static RetryConfiguration create(int maxAttempts, Duration maxDuration) {
checkArgument(maxAttempts > 0, "maxAttempts must be greater than 0");
checkArgument(
maxDuration != null && maxDuration.isLongerThan(Duration.ZERO),
"maxDuration must be greater than 0");
return new AutoValue_ElasticsearchIO_RetryConfiguration.Builder()
.setMaxAttempts(maxAttempts)
.setMaxDuration(maxDuration)
.setRetryPredicate(DEFAULT_RETRY_PREDICATE)
.build();
}
@VisibleForTesting
RetryConfiguration withRetryPredicate(RetryPredicate predicate) {
checkArgument(predicate != null, "predicate must be provided");
return builder().setRetryPredicate(predicate).build();
}
/**
* An interface used to control if we retry the Elasticsearch call when a {@link Response} is
* obtained. If {@link RetryPredicate#test} returns true, {@link Write} tries to resend
* the requests to the Elasticsearch server if the {@link RetryConfiguration} permits it.
*/
@FunctionalInterface
interface RetryPredicate extends Predicate<HttpEntity>, Serializable {}
/**
* This is the default predicate used to test if a failed ES operation should be retried. A
* retry will be attempted until the maxAttempts or maxDuration is exceeded, whichever comes
* first, for TOO_MANY_REQUESTS(429) error.
*/
@VisibleForTesting
static class DefaultRetryPredicate implements RetryPredicate {
private int errorCode;
DefaultRetryPredicate(int code) {
this.errorCode = code;
}
DefaultRetryPredicate() {
this(429);
}
/** Returns true if the response has the error code for any mutation. */
private static boolean errorCodePresent(HttpEntity responseEntity, int errorCode) {
try {
JsonNode json = parseResponse(responseEntity);
if (json.path("errors").asBoolean()) {
for (JsonNode item : json.path("items")) {
if (item.findValue("status").asInt() == errorCode) {
return true;
}
}
}
} catch (IOException e) {
LOG.warn("Could not extract error codes from responseEntity {}", responseEntity);
}
return false;
}
@Override
public boolean test(HttpEntity responseEntity) {
return errorCodePresent(responseEntity, errorCode);
}
}
}
/** A {@link PTransform} converting docs to their Bulk API counterparts. */
@AutoValue
public abstract static class DocToBulk
extends PTransform<PCollection<String>, PCollection<String>> {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private static final int DEFAULT_RETRY_ON_CONFLICT = 5;
static {
SimpleModule module = new SimpleModule();
module.addSerializer(DocumentMetadata.class, new DocumentMetadataSerializer());
OBJECT_MAPPER.registerModule(module);
}
abstract @Nullable ConnectionConfiguration getConnectionConfiguration();
abstract Write.@Nullable FieldValueExtractFn getIdFn();
abstract Write.@Nullable FieldValueExtractFn getIndexFn();
abstract Write.@Nullable FieldValueExtractFn getRoutingFn();
abstract Write.@Nullable FieldValueExtractFn getTypeFn();
abstract Write.@Nullable FieldValueExtractFn getDocVersionFn();
abstract @Nullable String getDocVersionType();
abstract @Nullable String getUpsertScript();
abstract @Nullable Boolean getUsePartialUpdate();
abstract Write.@Nullable BooleanFieldValueExtractFn getIsDeleteFn();
abstract @Nullable Integer getBackendVersion();
abstract Builder builder();
@AutoValue.Builder
abstract static class Builder {
abstract Builder setConnectionConfiguration(ConnectionConfiguration connectionConfiguration);
abstract Builder setIdFn(Write.FieldValueExtractFn idFunction);
abstract Builder setIndexFn(Write.FieldValueExtractFn indexFn);
abstract Builder setRoutingFn(Write.FieldValueExtractFn routingFunction);
abstract Builder setTypeFn(Write.FieldValueExtractFn typeFn);
abstract Builder setDocVersionFn(Write.FieldValueExtractFn docVersionFn);
abstract Builder setDocVersionType(String docVersionType);
abstract Builder setIsDeleteFn(Write.BooleanFieldValueExtractFn isDeleteFn);
abstract Builder setUsePartialUpdate(Boolean usePartialUpdate);
abstract Builder setUpsertScript(String source);
abstract Builder setBackendVersion(Integer assumedBackendVersion);
abstract DocToBulk build();
}
/**
* Provide the Elasticsearch connection configuration object. Only required if
* withBackendVersion was not used i.e. getBackendVersion() returns null.
*
* @param connectionConfiguration the Elasticsearch {@link ConnectionConfiguration} object
* @return the {@link DocToBulk} with connection configuration set
*/
public DocToBulk withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) {
checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null");
return builder().setConnectionConfiguration(connectionConfiguration).build();
}
/**
* Provide a function to extract the id from the document. This id will be used as the document
* id in Elasticsearch. Should the function throw an Exception then the batch will fail and the
* exception propagated.
*
* @param idFn to extract the document ID
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withIdFn(Write.FieldValueExtractFn idFn) {
checkArgument(idFn != null, "idFn must not be null");
return builder().setIdFn(idFn).build();
}
/**
* Provide a function to extract the target index from the document allowing for dynamic
* document routing. Should the function throw an Exception then the batch will fail and the
* exception propagated.
*
* @param indexFn to extract the destination index from
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withIndexFn(Write.FieldValueExtractFn indexFn) {
checkArgument(indexFn != null, "indexFn must not be null");
return builder().setIndexFn(indexFn).build();
}
/**
* Provide a function to extract the target routing from the document allowing for dynamic
* document routing. Should the function throw an Exception then the batch will fail and the
* exception propagated.
*
* @param routingFn to extract the destination routing from
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withRoutingFn(Write.FieldValueExtractFn routingFn) {
checkArgument(routingFn != null, "routingFn must not be null");
return builder().setRoutingFn(routingFn).build();
}
/**
* Provide a function to extract the target type from the document allowing for dynamic document
* routing. Should the function throw an Exception then the batch will fail and the exception
* propagated. Users are encouraged to consider carefully if multiple types are a sensible model
* <a
* href="https:
* discussed in this blog</a>.
*
* @param typeFn to extract the destination type from
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withTypeFn(Write.FieldValueExtractFn typeFn) {
checkArgument(typeFn != null, "typeFn must not be null");
return builder().setTypeFn(typeFn).build();
}
/**
* Provide an instruction to control whether partial updates or inserts (default) are issued to
* Elasticsearch.
*
* @param usePartialUpdate set to true to issue partial updates
* @return the {@link DocToBulk} with the partial update control set
*/
public DocToBulk withUsePartialUpdate(boolean usePartialUpdate) {
return builder().setUsePartialUpdate(usePartialUpdate).build();
}
/**
* Whether to use scripted updates and what script to use.
*
* @param source set to the value of the script source, painless lang
* @return the {@link DocToBulk} with the scripted updates set
*/
public DocToBulk withUpsertScript(String source) {
if (getBackendVersion() == null || getBackendVersion() == 2) {
LOG.warn("Painless scripts are not supported on Elasticsearch clusters before version 5.0");
}
return builder().setUsePartialUpdate(false).setUpsertScript(source).build();
}
/**
* Provide a function to extract the doc version from the document. This version number will be
* used as the document version in Elasticsearch. Should the function throw an Exception then
* the batch will fail and the exception propagated. Incompatible with update operations and
* should only be used with withUsePartialUpdate(false)
*
* @param docVersionFn to extract the document version
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withDocVersionFn(Write.FieldValueExtractFn docVersionFn) {
checkArgument(docVersionFn != null, "docVersionFn must not be null");
return builder().setDocVersionFn(docVersionFn).build();
}
/**
* Provide a function to extract the target operation either upsert or delete from the document
* fields allowing dynamic bulk operation decision. While using withIsDeleteFn, it should be
* taken care that the document's id extraction is defined using the withIdFn function or else
* IllegalArgumentException is thrown. Should the function throw an Exception then the batch
* will fail and the exception propagated.
*
* @param isDeleteFn function to determine whether the specific document should be deleted
* @return the {@link DocToBulk} with the function set
*/
public DocToBulk withIsDeleteFn(Write.BooleanFieldValueExtractFn isDeleteFn) {
checkArgument(isDeleteFn != null, "deleteFn is required");
return builder().setIsDeleteFn(isDeleteFn).build();
}
/**
* Provide a function to extract the doc version from the document. This version number will be
* used as the document version in Elasticsearch. Should the function throw an Exception then
* the batch will fail and the exception propagated. Incompatible with update operations and
* should only be used with withUsePartialUpdate(false)
*
* @param docVersionType the version type to use, one of {@value VERSION_TYPES}
* @return the {@link DocToBulk} with the doc version type set
*/
public DocToBulk withDocVersionType(String docVersionType) {
checkArgument(
VERSION_TYPES.contains(docVersionType),
"docVersionType must be one of " + "%s",
String.join(", ", VERSION_TYPES));
return builder().setDocVersionType(docVersionType).build();
}
/**
* Use to set explicitly which version of Elasticsearch the destination cluster is running.
* Providing this hint means there is no need for setting {@link
* DocToBulk#withConnectionConfiguration}.
*
* <p>Note: if the value of @param backendVersion differs from the version the destination
* cluster is running, behavior is undefined and likely to yield errors.
*
* @param backendVersion the major version number of the version of Elasticsearch being run in
* the cluster where documents will be indexed.
* @return the {@link DocToBulk} with the Elasticsearch major version number set
*/
public DocToBulk withBackendVersion(int backendVersion) {
checkArgument(
VALID_CLUSTER_VERSIONS.contains(backendVersion),
"Backend version may only be one of %s",
VALID_CLUSTER_VERSIONS);
return builder().setBackendVersion(backendVersion).build();
}
@Override
public PCollection<String> expand(PCollection<String> docs) {
ConnectionConfiguration connectionConfiguration = getConnectionConfiguration();
Integer backendVersion = getBackendVersion();
Write.FieldValueExtractFn idFn = getIdFn();
Write.BooleanFieldValueExtractFn isDeleteFn = getIsDeleteFn();
checkState(
(backendVersion != null || connectionConfiguration != null),
"withBackendVersion() or withConnectionConfiguration() is required");
checkArgument(
isDeleteFn == null || idFn != null,
"Id needs to be specified by withIdFn for delete operation");
return docs.apply(ParDo.of(new DocToBulkFn(this)));
}
private static class DocumentMetadata implements Serializable {
final String index;
final String type;
final String id;
final Integer retryOnConflict;
final String routing;
final Integer backendVersion;
final String version;
final String versionType;
DocumentMetadata(
String index,
String type,
String id,
Integer retryOnConflict,
String routing,
Integer backendVersion,
String version,
String versionType) {
this.index = index;
this.id = id;
this.type = type;
this.retryOnConflict = retryOnConflict;
this.routing = routing;
this.backendVersion = backendVersion;
this.version = version;
this.versionType = versionType;
}
}
private static class DocumentMetadataSerializer extends StdSerializer<DocumentMetadata> {
private DocumentMetadataSerializer() {
super(DocumentMetadata.class);
}
@Override
public void serialize(DocumentMetadata value, JsonGenerator gen, SerializerProvider provider)
throws IOException {
gen.writeStartObject();
if (value.index != null) {
gen.writeStringField("_index", value.index);
}
if (value.type != null) {
gen.writeStringField("_type", value.type);
}
if (value.id != null) {
gen.writeStringField("_id", value.id);
}
if (value.routing != null) {
gen.writeStringField("routing", value.routing);
}
if (value.retryOnConflict != null && value.backendVersion <= 6) {
gen.writeNumberField("_retry_on_conflict", value.retryOnConflict);
}
if (value.retryOnConflict != null && value.backendVersion >= 7) {
gen.writeNumberField("retry_on_conflict", value.retryOnConflict);
}
if (value.version != null) {
gen.writeStringField("version", value.version);
}
if (value.versionType != null) {
gen.writeStringField("version_type", value.versionType);
}
gen.writeEndObject();
}
}
@VisibleForTesting
static String createBulkApiEntity(DocToBulk spec, String document, int backendVersion)
throws IOException {
String documentMetadata = "{}";
boolean isDelete = false;
if (spec.getIndexFn() != null
|| spec.getTypeFn() != null
|| spec.getIdFn() != null
|| spec.getRoutingFn() != null) {
JsonNode parsedDocument = OBJECT_MAPPER.readTree(document);
documentMetadata = getDocumentMetadata(spec, parsedDocument, backendVersion);
if (spec.getIsDeleteFn() != null) {
isDelete = spec.getIsDeleteFn().apply(parsedDocument);
}
}
if (isDelete) {
return String.format("{ \"delete\" : %s }%n", documentMetadata);
} else {
if (spec.getUsePartialUpdate()) {
return String.format(
"{ \"update\" : %s }%n{ \"doc\" : %s, " + "\"doc_as_upsert\" : true }%n",
documentMetadata, document);
} else if (spec.getUpsertScript() != null) {
return String.format(
"{ \"update\" : %s }%n{ \"script\" : {\"source\": \"%s\", "
+ "\"params\": %s}, \"upsert\" : %s, \"scripted_upsert\": true}%n",
documentMetadata, spec.getUpsertScript(), document, document);
} else {
return String.format("{ \"index\" : %s }%n%s%n", documentMetadata, document);
}
}
}
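/*
 * For orientation, with withIdFn and an upsert script configured, a hypothetical document
 * {"id":"1","views":2} would be serialized (against an assumed Elasticsearch 7 backend) roughly
 * as the following bulk lines; note the scripted_upsert flag and the retry_on_conflict metadata
 * added for update operations:
 *
 *   { "update" : {"_id":"1","retry_on_conflict":5} }
 *   { "script" : {"source": "ctx._source.views += params.views", "params": {"id":"1","views":2}},
 *     "upsert" : {"id":"1","views":2}, "scripted_upsert": true}
 */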
private static String lowerCaseOrNull(String input) {
return input == null ? null : input.toLowerCase();
}
/**
* Extracts the components that comprise the document address from the document using the {@link
* Write.FieldValueExtractFn} configured. This allows any or all of the index, type and document
* id to be controlled on a per document basis. If none are provided then an empty default of
* {@code {}} is returned. Sanitization of the index is performed, automatically lower-casing
* the value as required by Elasticsearch.
*
* @param parsedDocument the json from which the index, type and id may be extracted
* @return the document address as JSON or the default
* @throws IOException if the document cannot be parsed as JSON
*/
private static String getDocumentMetadata(
DocToBulk spec, JsonNode parsedDocument, int backendVersion) throws IOException {
DocumentMetadata metadata =
new DocumentMetadata(
spec.getIndexFn() != null
? lowerCaseOrNull(spec.getIndexFn().apply(parsedDocument))
: null,
spec.getTypeFn() != null ? spec.getTypeFn().apply(parsedDocument) : null,
spec.getIdFn() != null ? spec.getIdFn().apply(parsedDocument) : null,
(spec.getUsePartialUpdate()
|| (spec.getUpsertScript() != null && !spec.getUpsertScript().isEmpty()))
? DEFAULT_RETRY_ON_CONFLICT
: null,
spec.getRoutingFn() != null ? spec.getRoutingFn().apply(parsedDocument) : null,
backendVersion,
spec.getDocVersionFn() != null ? spec.getDocVersionFn().apply(parsedDocument) : null,
spec.getDocVersionType());
return OBJECT_MAPPER.writeValueAsString(metadata);
}
/** {@link DoFn} for the {@link DocToBulk} transform. */
@VisibleForTesting
static class DocToBulkFn extends DoFn<String, String> {
private final DocToBulk spec;
private int backendVersion;
public DocToBulkFn(DocToBulk spec) {
this.spec = spec;
}
@Setup
public void setup() throws IOException {
if (spec.getBackendVersion() != null) {
backendVersion = spec.getBackendVersion();
} else {
backendVersion = ElasticsearchIO.getBackendVersion(spec.getConnectionConfiguration());
}
}
@ProcessElement
public void processElement(ProcessContext c) throws IOException {
c.output(createBulkApiEntity(spec, c.element(), backendVersion));
}
}
}
/**
* A {@link PTransform} writing data to Elasticsearch.
*
* <p>This {@link PTransform} acts as a convenience wrapper for doing both document to bulk API
* serialization as well as batching those Bulk API entities and writing them to an Elasticsearch
* cluster. This class is effectively a thin proxy for DocToBulk->BulkIO all-in-one for
* convenience and backward compatibility.
*/
public static class Write extends PTransform<PCollection<String>, PDone> {
public interface FieldValueExtractFn extends SerializableFunction<JsonNode, String> {}
public interface BooleanFieldValueExtractFn extends SerializableFunction<JsonNode, Boolean> {}
private DocToBulk docToBulk =
new AutoValue_ElasticsearchIO_DocToBulk.Builder()
.setUsePartialUpdate(false)
.build();
private BulkIO bulkIO =
new AutoValue_ElasticsearchIO_BulkIO.Builder()
.setMaxBatchSize(1000L)
.setMaxBatchSizeBytes(5L * 1024L * 1024L)
.setUseStatefulBatches(false)
.setMaxParallelRequestsPerWindow(1)
.build();
public DocToBulk getDocToBulk() {
return docToBulk;
}
public BulkIO getBulkIO() {
return bulkIO;
}
/** Refer to {@link DocToBulk#withIdFn}. */
public Write withIdFn(FieldValueExtractFn idFn) {
docToBulk = docToBulk.withIdFn(idFn);
return this;
}
/** Refer to {@link DocToBulk#withIndexFn}. */
public Write withIndexFn(FieldValueExtractFn indexFn) {
docToBulk = docToBulk.withIndexFn(indexFn);
return this;
}
/** Refer to {@link DocToBulk#withRoutingFn}. */
public Write withRoutingFn(FieldValueExtractFn routingFn) {
docToBulk = docToBulk.withRoutingFn(routingFn);
return this;
}
/** Refer to {@link DocToBulk#withTypeFn}. */
public Write withTypeFn(FieldValueExtractFn typeFn) {
docToBulk = docToBulk.withTypeFn(typeFn);
return this;
}
/** Refer to {@link DocToBulk#withDocVersionFn}. */
public Write withDocVersionFn(FieldValueExtractFn docVersionFn) {
docToBulk = docToBulk.withDocVersionFn(docVersionFn);
return this;
}
/** Refer to {@link DocToBulk#withDocVersionType}. */
public Write withDocVersionType(String docVersionType) {
docToBulk = docToBulk.withDocVersionType(docVersionType);
return this;
}
/** Refer to {@link DocToBulk#withUsePartialUpdate}. */
public Write withUsePartialUpdate(boolean usePartialUpdate) {
docToBulk = docToBulk.withUsePartialUpdate(usePartialUpdate);
return this;
}
/** Refer to {@link DocToBulk#withUpsertScript}. */
public Write withUpsertScript(String source) {
docToBulk = docToBulk.withUpsertScript(source);
return this;
}
/** Refer to {@link DocToBulk#withBackendVersion}. */
public Write withBackendVersion(int backendVersion) {
docToBulk = docToBulk.withBackendVersion(backendVersion);
return this;
}
/** Refer to {@link DocToBulk#withIsDeleteFn}. */
public Write withIsDeleteFn(Write.BooleanFieldValueExtractFn isDeleteFn) {
docToBulk = docToBulk.withIsDeleteFn(isDeleteFn);
return this;
}
/** Refer to {@link BulkIO#withConnectionConfiguration}. */
public Write withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) {
checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null");
docToBulk = docToBulk.withConnectionConfiguration(connectionConfiguration);
bulkIO = bulkIO.withConnectionConfiguration(connectionConfiguration);
return this;
}
/** Refer to {@link BulkIO#withMaxBatchSize}. */
public Write withMaxBatchSize(long batchSize) {
bulkIO = bulkIO.withMaxBatchSize(batchSize);
return this;
}
/** Refer to {@link BulkIO#withMaxBatchSizeBytes}. */
public Write withMaxBatchSizeBytes(long batchSizeBytes) {
bulkIO = bulkIO.withMaxBatchSizeBytes(batchSizeBytes);
return this;
}
/** Refer to {@link BulkIO#withRetryConfiguration}. */
public Write withRetryConfiguration(RetryConfiguration retryConfiguration) {
bulkIO = bulkIO.withRetryConfiguration(retryConfiguration);
return this;
}
/** Refer to {@link BulkIO#withIgnoreVersionConflicts}. */
public Write withIgnoreVersionConflicts(boolean ignoreVersionConflicts) {
bulkIO = bulkIO.withIgnoreVersionConflicts(ignoreVersionConflicts);
return this;
}
/** Refer to {@link BulkIO#withUseStatefulBatches}. */
public Write withUseStatefulBatches(boolean useStatefulBatches) {
bulkIO = bulkIO.withUseStatefulBatches(useStatefulBatches);
return this;
}
/** Refer to {@link BulkIO#withMaxBufferingDuration}. */
public Write withMaxBufferingDuration(Duration maxBufferingDuration) {
bulkIO = bulkIO.withMaxBufferingDuration(maxBufferingDuration);
return this;
}
/** Refer to {@link BulkIO#withMaxParallelRequestsPerWindow}. */
public Write withMaxParallelRequestsPerWindow(int maxParallelRequestsPerWindow) {
bulkIO = bulkIO.withMaxParallelRequestsPerWindow(maxParallelRequestsPerWindow);
return this;
}
/** Refer to {@link BulkIO#withAllowableResponseErrors}. */
public Write withAllowableResponseErrors(@Nullable Set<String> allowableResponseErrors) {
if (allowableResponseErrors == null) {
allowableResponseErrors = new HashSet<>();
}
bulkIO = bulkIO.withAllowableResponseErrors(allowableResponseErrors);
return this;
}
@Override
public PDone expand(PCollection<String> input) {
return input.apply(docToBulk).apply(bulkIO);
}
}
/**
* A {@link PTransform} writing Bulk API entities created by {@link ElasticsearchIO.DocToBulk} to
* an Elasticsearch cluster. Typically, using {@link ElasticsearchIO.Write} is preferred, whereas
* using {@link ElasticsearchIO.DocToBulk} and BulkIO separately is for advanced use cases such as
* mirroring data to multiple clusters or data lakes without recomputation.
*/
@AutoValue
public abstract static class BulkIO extends PTransform<PCollection<String>, PDone> {
@VisibleForTesting
static final String RETRY_ATTEMPT_LOG = "Error writing to Elasticsearch. Retry attempt[%d]";
@VisibleForTesting
static final String RETRY_FAILED_LOG =
"Error writing to ES after %d attempt(s). No more attempts allowed";
abstract @Nullable ConnectionConfiguration getConnectionConfiguration();
abstract long getMaxBatchSize();
abstract long getMaxBatchSizeBytes();
abstract @Nullable Duration getMaxBufferingDuration();
abstract boolean getUseStatefulBatches();
abstract int getMaxParallelRequestsPerWindow();
abstract @Nullable RetryConfiguration getRetryConfiguration();
abstract @Nullable Set<String> getAllowedResponseErrors();
abstract Builder builder();
@AutoValue.Builder
abstract static class Builder {
abstract Builder setConnectionConfiguration(ConnectionConfiguration connectionConfiguration);
abstract Builder setMaxBatchSize(long maxBatchSize);
abstract Builder setMaxBatchSizeBytes(long maxBatchSizeBytes);
abstract Builder setRetryConfiguration(RetryConfiguration retryConfiguration);
abstract Builder setAllowedResponseErrors(Set<String> allowedResponseErrors);
abstract Builder setMaxBufferingDuration(Duration maxBufferingDuration);
abstract Builder setUseStatefulBatches(boolean useStatefulBatches);
abstract Builder setMaxParallelRequestsPerWindow(int maxParallelRequestsPerWindow);
abstract BulkIO build();
}
/**
* Provide the Elasticsearch connection configuration object.
*
* @param connectionConfiguration the Elasticsearch {@link ConnectionConfiguration} object
* @return the {@link BulkIO} with connection configuration set
*/
public BulkIO withConnectionConfiguration(ConnectionConfiguration connectionConfiguration) {
checkArgument(connectionConfiguration != null, "connectionConfiguration can not be null");
return builder().setConnectionConfiguration(connectionConfiguration).build();
}
/**
* Provide a maximum size in number of documents for the batch see bulk API
* (https:
* docs (like Elasticsearch bulk size advice). See
* https:
* execution engine, size of bundles may vary, this sets the maximum size. Change this if you
* need to have smaller ElasticSearch bulks.
*
* @param batchSize maximum batch size in number of documents
* @return the {@link BulkIO} with connection batch size set
*/
public BulkIO withMaxBatchSize(long batchSize) {
checkArgument(batchSize > 0, "batchSize must be > 0, but was %s", batchSize);
return builder().setMaxBatchSize(batchSize).build();
}
/**
* Provide a maximum size in bytes for the batch see bulk API
* (https:
* (like Elasticsearch bulk size advice). See
* https:
* execution engine, size of bundles may vary, this sets the maximum size. Change this if you
* need to have smaller ElasticSearch bulks.
*
* @param batchSizeBytes maximum batch size in bytes
* @return the {@link BulkIO} with connection batch size in bytes set
*/
public BulkIO withMaxBatchSizeBytes(long batchSizeBytes) {
checkArgument(batchSizeBytes > 0, "batchSizeBytes must be > 0, but was %s", batchSizeBytes);
return builder().setMaxBatchSizeBytes(batchSizeBytes).build();
}
/**
* Provides configuration to retry a failed batch call to Elasticsearch. A batch is considered
* as failed if the underlying {@link RestClient} surfaces 429 HTTP status code as error for one
* or more of the items in the {@link Response}. Users should consider that retrying might
* compound the underlying problem which caused the initial failure. Users should also be aware
* that once retrying is exhausted the error is surfaced to the runner which <em>may</em> then
* opt to retry the current bundle in entirety or abort if the max number of retries of the
* runner is completed. Retrying uses an exponential backoff algorithm, with minimum backoff of
* 5 seconds and then surfacing the error once the maximum number of retries or maximum
* configuration duration is exceeded.
*
* <p>Example use:
*
* <pre>{@code
* ElasticsearchIO.write()
* .withRetryConfiguration(ElasticsearchIO.RetryConfiguration.create(10, Duration.standardMinutes(3))
* ...
* }</pre>
*
* @param retryConfiguration the rules which govern the retry behavior
* @return the {@link BulkIO} with retrying configured
*/
public BulkIO withRetryConfiguration(RetryConfiguration retryConfiguration) {
checkArgument(retryConfiguration != null, "retryConfiguration is required");
return builder().setRetryConfiguration(retryConfiguration).build();
}
/**
* Whether or not to suppress version conflict errors in a Bulk API response. This can be useful
* if your use case involves using external version types.
*
* @param ignoreVersionConflicts true to suppress version conflicts, false to surface version
* conflict errors.
* @return the {@link BulkIO} with version conflict handling configured
*/
public BulkIO withIgnoreVersionConflicts(boolean ignoreVersionConflicts) {
Set<String> allowedResponseErrors = getAllowedResponseErrors();
if (allowedResponseErrors == null) {
allowedResponseErrors = new HashSet<>();
}
if (ignoreVersionConflicts) {
allowedResponseErrors.add(VERSION_CONFLICT_ERROR);
}
return builder().setAllowedResponseErrors(allowedResponseErrors).build();
}
/**
* Provide a set of textual error types which can be contained in Bulk API response
* items[].error.type field. Any element in @param allowableResponseErrorTypes will suppress
* errors of the same type in Bulk responses.
*
* <p>See also
* https:
*
* @param allowableResponseErrorTypes
* @return the {@link BulkIO} with allowable response errors set
*/
public BulkIO withAllowableResponseErrors(@Nullable Set<String> allowableResponseErrorTypes) {
if (allowableResponseErrorTypes == null) {
allowableResponseErrorTypes = new HashSet<>();
}
return builder().setAllowedResponseErrors(allowableResponseErrorTypes).build();
}
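// A short sketch of the two ways to tolerate specific bulk item errors; the error type string
// shown is only an example of what a cluster may report:
//
//   bulkIO.withIgnoreVersionConflicts(true);
//   // or, for an arbitrary set of error types:
//   bulkIO.withAllowableResponseErrors(
//       new HashSet<>(Arrays.asList("version_conflict_engine_exception")));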
/**
* If using {@link BulkIO#withUseStatefulBatches}, this can be used to set a maximum elapsed
* time before buffered elements are emitted to Elasticsearch as a Bulk API request. If this
* config is not set, Bulk requests will not be issued until {@link BulkIO#getMaxBatchSize}
* number of documents have been buffered. This may result in higher latency in particular if
* your max batch size is set to a large value and your pipeline input is low volume.
*
* @param maxBufferingDuration the maximum duration to wait before sending any buffered
* documents to Elasticsearch, regardless of maxBatchSize.
* @return the {@link BulkIO} with maximum buffering duration set
*/
public BulkIO withMaxBufferingDuration(Duration maxBufferingDuration) {
LOG.warn(
"Use of withMaxBufferingDuration requires withUseStatefulBatches(true). "
+ "Setting that automatically.");
return builder()
.setUseStatefulBatches(true)
.setMaxBufferingDuration(maxBufferingDuration)
.build();
}
/**
* Whether or not to use Stateful Processing to ensure bulk requests have the desired number of
* entities i.e. as close to the maxBatchSize as possible. By default without this feature
* enabled, Bulk requests will not contain more than maxBatchSize entities, but the lower bound
* of batch size is determined by Beam Runner bundle sizes, which may be as few as 1.
*
* @param useStatefulBatches true enables the use of Stateful Processing to ensure that batches
* are as close to the maxBatchSize as possible.
* @return the {@link BulkIO} with Stateful Processing enabled or disabled
*/
public BulkIO withUseStatefulBatches(boolean useStatefulBatches) {
return builder().setUseStatefulBatches(useStatefulBatches).build();
}
/**
* When using {@link BulkIO#withUseStatefulBatches}, this sets how many separate (and parallel)
* batches are maintained per-key-per-window. BE AWARE that low values for @param
* maxParallelRequestsPerWindow, in particular if the input data has a finite number of windows,
* can reduce parallelism greatly. If data is globally windowed and @param
* maxParallelRequestsPerWindow is set to 1, there will only ever be 1 request in flight. Having
* only a single request in flight can be beneficial for ensuring an Elasticsearch cluster is
* not overwhelmed by parallel requests, but may not work for all use cases. If this number is
* less than the maximum number of workers in your pipeline, the final write step will
* effectively be distributed across only this many workers.
*
* @param maxParallelRequestsPerWindow the maximum number of parallel bulk requests for a window
* of data
* @return the {@link BulkIO} with maximum parallel bulk requests per window set
*/
public BulkIO withMaxParallelRequestsPerWindow(int maxParallelRequestsPerWindow) {
checkArgument(
maxParallelRequestsPerWindow > 0, "parameter value must be positive " + "a integer");
return builder().setMaxParallelRequestsPerWindow(maxParallelRequestsPerWindow).build();
}
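// A minimal sketch combining the three settings above (values are illustrative only):
//
//   bulkIO
//       .withUseStatefulBatches(true)
//       .withMaxBufferingDuration(Duration.standardSeconds(30))
//       .withMaxParallelRequestsPerWindow(4);
//
// With this configuration each window keeps at most 4 parallel bulk requests, and a partially
// filled batch is flushed after 30 seconds even if maxBatchSize has not been reached.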
/**
* Creates batches of documents using Stateful Processing based on user configurable settings of
* withMaxBufferingDuration and withMaxParallelRequestsPerWindow.
*
* <p>Mostly exists for testability of withMaxParallelRequestsPerWindow.
*/
@VisibleForTesting
static class StatefulBatching
extends PTransform<PCollection<String>, PCollection<KV<Integer, Iterable<String>>>> {
final BulkIO spec;
private StatefulBatching(BulkIO bulkSpec) {
spec = bulkSpec;
}
public static StatefulBatching fromSpec(BulkIO spec) {
return new StatefulBatching(spec);
}
@Override
public PCollection<KV<Integer, Iterable<String>>> expand(PCollection<String> input) {
GroupIntoBatches<Integer, String> groupIntoBatches =
GroupIntoBatches.ofSize(spec.getMaxBatchSize());
if (spec.getMaxBufferingDuration() != null) {
groupIntoBatches =
groupIntoBatches.withMaxBufferingDuration(spec.getMaxBufferingDuration());
}
return input
.apply(ParDo.of(new Reshuffle.AssignShardFn<>(spec.getMaxParallelRequestsPerWindow())))
.apply(groupIntoBatches);
}
}
@Override
public PDone expand(PCollection<String> input) {
ConnectionConfiguration connectionConfiguration = getConnectionConfiguration();
checkState(connectionConfiguration != null, "withConnectionConfiguration() is required");
if (getUseStatefulBatches()) {
input.apply(StatefulBatching.fromSpec(this)).apply(ParDo.of(new BulkIOStatefulFn(this)));
} else {
input.apply(ParDo.of(new BulkIOBundleFn(this)));
}
return PDone.in(input.getPipeline());
}
static class BulkIOBundleFn extends BulkIOBaseFn<String> {
@VisibleForTesting
BulkIOBundleFn(BulkIO bulkSpec) {
super(bulkSpec);
}
@ProcessElement
public void processElement(ProcessContext context) throws Exception {
String bulkApiEntity = context.element();
addAndMaybeFlush(bulkApiEntity);
}
}
/*
Intended for use in conjunction with {@link GroupIntoBatches}
*/
static class BulkIOStatefulFn extends BulkIOBaseFn<KV<Integer, Iterable<String>>> {
@VisibleForTesting
BulkIOStatefulFn(BulkIO bulkSpec) {
super(bulkSpec);
}
@ProcessElement
public void processElement(ProcessContext context) throws Exception {
Iterable<String> bulkApiEntities = context.element().getValue();
for (String bulkApiEntity : bulkApiEntities) {
addAndMaybeFlush(bulkApiEntity);
}
}
}
/** {@link DoFn} for the {@link BulkIO} transform. */
@VisibleForTesting
private abstract static class BulkIOBaseFn<T> extends DoFn<T, Void> {
private static final Duration RETRY_INITIAL_BACKOFF = Duration.standardSeconds(5);
private transient FluentBackoff retryBackoff;
private BulkIO spec;
private transient RestClient restClient;
private ArrayList<String> batch;
long currentBatchSizeBytes;
protected BulkIOBaseFn(BulkIO bulkSpec) {
this.spec = bulkSpec;
}
@Setup
public void setup() throws IOException {
ConnectionConfiguration connectionConfiguration = spec.getConnectionConfiguration();
restClient = connectionConfiguration.createClient();
retryBackoff =
FluentBackoff.DEFAULT.withMaxRetries(0).withInitialBackoff(RETRY_INITIAL_BACKOFF);
if (spec.getRetryConfiguration() != null) {
retryBackoff =
FluentBackoff.DEFAULT
.withInitialBackoff(RETRY_INITIAL_BACKOFF)
.withMaxRetries(spec.getRetryConfiguration().getMaxAttempts() - 1)
.withMaxCumulativeBackoff(spec.getRetryConfiguration().getMaxDuration());
}
}
@StartBundle
public void startBundle(StartBundleContext context) {
batch = new ArrayList<>();
currentBatchSizeBytes = 0;
}
@FinishBundle
public void finishBundle(FinishBundleContext context)
throws IOException, InterruptedException {
flushBatch();
}
protected void addAndMaybeFlush(String bulkApiEntity)
throws IOException, InterruptedException {
batch.add(bulkApiEntity);
currentBatchSizeBytes += bulkApiEntity.getBytes(StandardCharsets.UTF_8).length;
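// Flush as soon as either the document-count or the byte-size threshold is reached.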
if (batch.size() >= spec.getMaxBatchSize()
|| currentBatchSizeBytes >= spec.getMaxBatchSizeBytes()) {
flushBatch();
}
}
private boolean isRetryableClientException(Throwable t) {
return t.getCause() instanceof ConnectTimeoutException
|| t.getCause() instanceof SocketTimeoutException
|| t.getCause() instanceof ConnectionClosedException
|| t.getCause() instanceof ConnectException;
}
private void flushBatch() throws IOException, InterruptedException {
if (batch.isEmpty()) {
return;
}
LOG.info(
"ElasticsearchIO batch size: {}, batch size bytes: {}",
batch.size(),
currentBatchSizeBytes);
StringBuilder bulkRequest = new StringBuilder();
for (String json : batch) {
bulkRequest.append(json);
}
batch.clear();
currentBatchSizeBytes = 0L;
Response response = null;
HttpEntity responseEntity = null;
String endPoint = spec.getConnectionConfiguration().getBulkEndPoint();
HttpEntity requestBody =
new NStringEntity(bulkRequest.toString(), ContentType.APPLICATION_JSON);
try {
Request request = new Request("POST", endPoint);
request.addParameters(Collections.emptyMap());
request.setEntity(requestBody);
response = restClient.performRequest(request);
responseEntity = new BufferedHttpEntity(response.getEntity());
} catch (java.io.IOException ex) {
if (spec.getRetryConfiguration() == null || !isRetryableClientException(ex)) {
throw ex;
}
LOG.error("Caught ES timeout, retrying", ex);
}
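// Retry only if retries are configured and either the request failed outright or the retry
// predicate flags the response (e.g. HTTP 429 on one of the bulk items).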
if (spec.getRetryConfiguration() != null
&& (response == null
|| responseEntity == null
|| spec.getRetryConfiguration().getRetryPredicate().test(responseEntity))) {
if (responseEntity != null
&& spec.getRetryConfiguration().getRetryPredicate().test(responseEntity)) {
LOG.warn("ES Cluster is responding with HTP 429 - TOO_MANY_REQUESTS.");
}
responseEntity = handleRetry("POST", endPoint, Collections.emptyMap(), requestBody);
}
checkForErrors(responseEntity, spec.getAllowedResponseErrors());
}
/** retry request based on retry configuration policy. */
private HttpEntity handleRetry(
String method, String endpoint, Map<String, String> params, HttpEntity requestBody)
throws IOException, InterruptedException {
Response response;
HttpEntity responseEntity = null;
Sleeper sleeper = Sleeper.DEFAULT;
BackOff backoff = retryBackoff.backoff();
int attempt = 0;
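// Keep retrying until the backoff policy is exhausted; the first response the retry predicate
// no longer flags is returned to the caller.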
while (BackOffUtils.next(sleeper, backoff)) {
LOG.warn(String.format(RETRY_ATTEMPT_LOG, ++attempt));
try {
Request request = new Request(method, endpoint);
request.addParameters(params);
request.setEntity(requestBody);
response = restClient.performRequest(request);
responseEntity = new BufferedHttpEntity(response.getEntity());
} catch (java.io.IOException ex) {
if (isRetryableClientException(ex)) {
LOG.error("Caught ES timeout, retrying", ex);
continue;
}
}
if (!Objects.requireNonNull(spec.getRetryConfiguration())
.getRetryPredicate()
.test(responseEntity)) {
return responseEntity;
} else {
LOG.warn("ES Cluster is responding with HTP 429 - TOO_MANY_REQUESTS.");
}
}
throw new IOException(String.format(RETRY_FAILED_LOG, attempt));
}
@Teardown
public void closeClient() throws IOException {
if (restClient != null) {
restClient.close();
}
}
}
}
static int getBackendVersion(ConnectionConfiguration connectionConfiguration) {
try (RestClient restClient = connectionConfiguration.createClient()) {
Request request = new Request("GET", "");
Response response = restClient.performRequest(request);
JsonNode jsonNode = parseResponse(response.getEntity());
int backendVersion =
Integer.parseInt(jsonNode.path("version").path("number").asText().substring(0, 1));
checkArgument(
(VALID_CLUSTER_VERSIONS.contains(backendVersion)),
"The Elasticsearch version to connect to is %s.x. "
+ "This version of the ElasticsearchIO is only compatible with "
+ "Elasticsearch v7.x, v6.x, v5.x and v2.x",
backendVersion);
return backendVersion;
} catch (IOException e) {
throw new IllegalArgumentException("Cannot get Elasticsearch version", e);
}
}
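// For reference, the root endpoint response parsed above looks roughly like
// {"name":"...","version":{"number":"7.10.2",...},...}; the leading major-version digit
// ("7" in this illustrative example) is what gets validated and returned.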
} | class BoundedElasticsearchReader extends BoundedSource.BoundedReader<String> {
private final BoundedElasticsearchSource source;
private RestClient restClient;
private String current;
private String scrollId;
private ListIterator<String> batchIterator;
private BoundedElasticsearchReader(BoundedElasticsearchSource source) {
this.source = source;
}
@Override | class BoundedElasticsearchReader extends BoundedSource.BoundedReader<String> {
private final BoundedElasticsearchSource source;
private RestClient restClient;
private String current;
private String scrollId;
private ListIterator<String> batchIterator;
private BoundedElasticsearchReader(BoundedElasticsearchSource source) {
this.source = source;
}
@Override |
`ResponseImpl.java` is used by both the client and the server, is this change meant to change both? | public Object getEntity() {
checkClosed();
if (entity instanceof GenericEntity genericEntity) {
if (genericEntity.getRawType().equals(genericEntity.getType())) {
return ((GenericEntity<?>) entity).getEntity();
}
}
return entity == null ? entityStream : entity;
} | return entity == null ? entityStream : entity; | public Object getEntity() {
checkClosed();
if (entity instanceof GenericEntity genericEntity) {
if (genericEntity.getRawType().equals(genericEntity.getType())) {
return ((GenericEntity<?>) entity).getEntity();
}
}
return entity == null ? entityStream : entity;
} | class ResponseImpl extends Response {
int status;
String reasonPhrase;
protected Object entity;
MultivaluedTreeMap<String, Object> headers;
InputStream entityStream;
StatusTypeImpl statusType;
MultivaluedMap<String, String> stringHeaders;
Annotation[] entityAnnotations;
protected boolean consumed;
protected boolean closed;
protected boolean buffered;
@Override
public int getStatus() {
return status;
}
/**
* Internal: this is just cheaper than duplicating the response just to change the status
*/
public void setStatus(int status) {
this.status = status;
statusType = null;
}
@Override
public StatusType getStatusInfo() {
if (statusType == null) {
statusType = new StatusTypeImpl(status, reasonPhrase);
}
return statusType;
}
/**
* Internal: this is just cheaper than duplicating the response just to change the status
*/
public void setStatusInfo(StatusType statusType) {
this.statusType = StatusTypeImpl.valueOf(statusType);
status = statusType.getStatusCode();
}
@Override
protected void setEntity(Object entity) {
this.entity = entity;
if (entity instanceof InputStream inputStream) {
this.entityStream = inputStream;
}
}
public InputStream getEntityStream() {
return entityStream;
}
public void setEntityStream(InputStream entityStream) {
this.entityStream = entityStream;
}
protected <T> T readEntity(Class<T> entityType, Type genericType, Annotation[] annotations) {
if (entity != null && entityType.isInstance(entity)) {
return (T) entity;
}
checkClosed();
throw new ProcessingException(
"Request could not be mapped to type " + (genericType != null ? genericType : entityType));
}
@Override
public <T> T readEntity(Class<T> entityType) {
return readEntity(entityType, entityType, null);
}
@SuppressWarnings("unchecked")
@Override
public <T> T readEntity(GenericType<T> entityType) {
return (T) readEntity(entityType.getRawType(), entityType.getType(), null);
}
@Override
public <T> T readEntity(Class<T> entityType, Annotation[] annotations) {
return readEntity(entityType, entityType, annotations);
}
@SuppressWarnings("unchecked")
@Override
public <T> T readEntity(GenericType<T> entityType, Annotation[] annotations) {
return (T) readEntity(entityType.getRawType(), entityType.getType(), annotations);
}
@Override
public boolean hasEntity() {
checkClosed();
return entity != null || entityStream != null;
}
@Override
public boolean bufferEntity() {
checkClosed();
if (buffered) {
return true;
}
if (entityStream != null && !consumed) {
consumed = true;
if (!entityStream.markSupported()) {
ByteArrayOutputStream os = new ByteArrayOutputStream();
byte[] buffer = new byte[4096];
int read;
try {
while ((read = entityStream.read(buffer)) != -1) {
os.write(buffer, 0, read);
}
entityStream.close();
} catch (IOException x) {
throw new UncheckedIOException(x);
}
entityStream = new ByteArrayInputStream(os.toByteArray());
}
buffered = true;
return true;
}
return false;
}
protected void checkClosed() {
if (closed && !buffered)
throw new IllegalStateException("Response has been closed");
}
@Override
public void close() {
if (!closed) {
closed = true;
if (entityStream != null) {
try {
entityStream.close();
} catch (IOException e) {
throw new ProcessingException(e);
}
}
}
}
@Override
public MediaType getMediaType() {
return HeaderUtil.getMediaType(headers);
}
@Override
public Locale getLanguage() {
return HeaderUtil.getLanguage(headers);
}
@Override
public int getLength() {
return HeaderUtil.getLength(headers);
}
@Override
public Set<String> getAllowedMethods() {
return HeaderUtil.getAllowedMethods(headers);
}
@Override
public Map<String, NewCookie> getCookies() {
return HeaderUtil.getNewCookies(headers);
}
@Override
public EntityTag getEntityTag() {
return HeaderUtil.getEntityTag(headers);
}
@Override
public Date getDate() {
return HeaderUtil.getDate(headers);
}
@Override
public Date getLastModified() {
return HeaderUtil.getLastModified(headers);
}
@Override
public URI getLocation() {
return HeaderUtil.getLocation(headers);
}
private LinkHeaders getLinkHeaders() {
return new LinkHeaders(headers);
}
@Override
public Set<Link> getLinks() {
return new HashSet<>(getLinkHeaders().getLinks());
}
@Override
public boolean hasLink(String relation) {
return getLinkHeaders().getLinkByRelationship(relation) != null;
}
@Override
public Link getLink(String relation) {
return getLinkHeaders().getLinkByRelationship(relation);
}
@Override
public Builder getLinkBuilder(String relation) {
Link link = getLinkHeaders().getLinkByRelationship(relation);
if (link == null) {
return null;
}
return Link.fromLink(link);
}
@Override
public MultivaluedMap<String, Object> getMetadata() {
return headers;
}
@Override
public MultivaluedMap<String, String> getStringHeaders() {
if (stringHeaders == null) {
stringHeaders = new CaseInsensitiveMap<>();
headers.forEach(this::populateStringHeaders);
}
return stringHeaders;
}
public void populateStringHeaders(String headerName, List<Object> values) {
List<String> stringValues = new ArrayList<>(values.size());
for (int i = 0; i < values.size(); i++) {
stringValues.add(HeaderUtil.headerToString(values.get(i)));
}
stringHeaders.put(headerName, stringValues);
}
@Override
public String getHeaderString(String name) {
return HeaderUtil.getHeaderString(getStringHeaders(), name);
}
public Annotation[] getEntityAnnotations() {
return entityAnnotations;
}
} | class ResponseImpl extends Response {
int status;
String reasonPhrase;
protected Object entity;
MultivaluedTreeMap<String, Object> headers;
InputStream entityStream;
StatusTypeImpl statusType;
MultivaluedMap<String, String> stringHeaders;
Annotation[] entityAnnotations;
protected boolean consumed;
protected boolean closed;
protected boolean buffered;
@Override
public int getStatus() {
return status;
}
/**
* Internal: this is just cheaper than duplicating the response just to change the status
*/
public void setStatus(int status) {
this.status = status;
statusType = null;
}
@Override
public StatusType getStatusInfo() {
if (statusType == null) {
statusType = new StatusTypeImpl(status, reasonPhrase);
}
return statusType;
}
/**
* Internal: this is just cheaper than duplicating the response just to change the status
*/
public void setStatusInfo(StatusType statusType) {
this.statusType = StatusTypeImpl.valueOf(statusType);
status = statusType.getStatusCode();
}
@Override
protected void setEntity(Object entity) {
this.entity = entity;
if (entity instanceof InputStream inputStream) {
this.entityStream = inputStream;
}
}
public InputStream getEntityStream() {
return entityStream;
}
public void setEntityStream(InputStream entityStream) {
this.entityStream = entityStream;
}
protected <T> T readEntity(Class<T> entityType, Type genericType, Annotation[] annotations) {
if (entity != null && entityType.isInstance(entity)) {
return (T) entity;
}
checkClosed();
throw new ProcessingException(
"Request could not be mapped to type " + (genericType != null ? genericType : entityType));
}
@Override
public <T> T readEntity(Class<T> entityType) {
return readEntity(entityType, entityType, null);
}
@SuppressWarnings("unchecked")
@Override
public <T> T readEntity(GenericType<T> entityType) {
return (T) readEntity(entityType.getRawType(), entityType.getType(), null);
}
@Override
public <T> T readEntity(Class<T> entityType, Annotation[] annotations) {
return readEntity(entityType, entityType, annotations);
}
@SuppressWarnings("unchecked")
@Override
public <T> T readEntity(GenericType<T> entityType, Annotation[] annotations) {
return (T) readEntity(entityType.getRawType(), entityType.getType(), annotations);
}
@Override
public boolean hasEntity() {
checkClosed();
return entity != null || entityStream != null;
}
@Override
public boolean bufferEntity() {
checkClosed();
if (buffered) {
return true;
}
if (entityStream != null && !consumed) {
consumed = true;
if (!entityStream.markSupported()) {
ByteArrayOutputStream os = new ByteArrayOutputStream();
byte[] buffer = new byte[4096];
int read;
try {
while ((read = entityStream.read(buffer)) != -1) {
os.write(buffer, 0, read);
}
entityStream.close();
} catch (IOException x) {
throw new UncheckedIOException(x);
}
entityStream = new ByteArrayInputStream(os.toByteArray());
}
buffered = true;
return true;
}
return false;
}
protected void checkClosed() {
if (closed && !buffered)
throw new IllegalStateException("Response has been closed");
}
@Override
public void close() {
if (!closed) {
closed = true;
if (entityStream != null) {
try {
entityStream.close();
} catch (IOException e) {
throw new ProcessingException(e);
}
}
}
}
@Override
public MediaType getMediaType() {
return HeaderUtil.getMediaType(headers);
}
@Override
public Locale getLanguage() {
return HeaderUtil.getLanguage(headers);
}
@Override
public int getLength() {
return HeaderUtil.getLength(headers);
}
@Override
public Set<String> getAllowedMethods() {
return HeaderUtil.getAllowedMethods(headers);
}
@Override
public Map<String, NewCookie> getCookies() {
return HeaderUtil.getNewCookies(headers);
}
@Override
public EntityTag getEntityTag() {
return HeaderUtil.getEntityTag(headers);
}
@Override
public Date getDate() {
return HeaderUtil.getDate(headers);
}
@Override
public Date getLastModified() {
return HeaderUtil.getLastModified(headers);
}
@Override
public URI getLocation() {
return HeaderUtil.getLocation(headers);
}
private LinkHeaders getLinkHeaders() {
return new LinkHeaders(headers);
}
@Override
public Set<Link> getLinks() {
return new HashSet<>(getLinkHeaders().getLinks());
}
@Override
public boolean hasLink(String relation) {
return getLinkHeaders().getLinkByRelationship(relation) != null;
}
@Override
public Link getLink(String relation) {
return getLinkHeaders().getLinkByRelationship(relation);
}
@Override
public Builder getLinkBuilder(String relation) {
Link link = getLinkHeaders().getLinkByRelationship(relation);
if (link == null) {
return null;
}
return Link.fromLink(link);
}
@Override
public MultivaluedMap<String, Object> getMetadata() {
return headers;
}
@Override
public MultivaluedMap<String, String> getStringHeaders() {
if (stringHeaders == null) {
stringHeaders = new CaseInsensitiveMap<>();
headers.forEach(this::populateStringHeaders);
}
return stringHeaders;
}
public void populateStringHeaders(String headerName, List<Object> values) {
List<String> stringValues = new ArrayList<>(values.size());
for (int i = 0; i < values.size(); i++) {
stringValues.add(HeaderUtil.headerToString(values.get(i)));
}
stringHeaders.put(headerName, stringValues);
}
@Override
public String getHeaderString(String name) {
return HeaderUtil.getHeaderString(getStringHeaders(), name);
}
public Annotation[] getEntityAnnotations() {
return entityAnnotations;
}
} |
yes, this will be caught in the method that calls it, and the log will be printed | private void validateTableAndColumn(StatsCategoryDesc categoryDesc) throws AnalysisException {
long dbId = categoryDesc.getDbId();
long tblId = categoryDesc.getTableId();
String columnName = categoryDesc.getColumnName();
Database db = Catalog.getCurrentCatalog().getDbOrAnalysisException(dbId);
Table table = db.getTableOrAnalysisException(tblId);
if (!Strings.isNullOrEmpty(columnName)) {
Column column = table.getColumn(columnName);
if (column == null) {
throw new AnalysisException("Column " + columnName + " does not exist in table " + table.getName());
}
}
} | if (column == null) { | private void validateTableAndColumn(StatsCategoryDesc categoryDesc) throws AnalysisException {
long dbId = categoryDesc.getDbId();
long tblId = categoryDesc.getTableId();
String columnName = categoryDesc.getColumnName();
Database db = Catalog.getCurrentCatalog().getDbOrAnalysisException(dbId);
Table table = db.getTableOrAnalysisException(tblId);
if (!Strings.isNullOrEmpty(columnName)) {
Column column = table.getColumn(columnName);
if (column == null) {
throw new AnalysisException("Column " + columnName + " does not exist in table " + table.getName());
}
}
} | class StatisticsManager {
private final static Logger LOG = LogManager.getLogger(StatisticsTaskScheduler.class);
private final Statistics statistics;
public StatisticsManager() {
this.statistics = new Statistics();
}
public void alterTableStatistics(AlterTableStatsStmt stmt)
throws AnalysisException {
Table table = validateTableName(stmt.getTableName());
this.statistics.updateTableStats(table.getId(), stmt.getProperties());
}
public void alterColumnStatistics(AlterColumnStatsStmt stmt) throws AnalysisException {
Table table = validateTableName(stmt.getTableName());
String columnName = stmt.getColumnName();
Column column = table.getColumn(columnName);
if (column == null) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_FIELD_ERROR, columnName, table.getName());
}
this.statistics.updateColumnStats(table.getId(), columnName, column.getType(), stmt.getProperties());
}
public List<List<String>> showTableStatsList(String dbName, String tableName)
throws AnalysisException {
Database db = Catalog.getCurrentCatalog().getDbOrAnalysisException(dbName);
List<List<String>> result = Lists.newArrayList();
if (tableName != null) {
Table table = db.getTableOrAnalysisException(tableName);
if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), dbName, tableName,
PrivPredicate.SHOW)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "SHOW CREATE TABLE",
ConnectContext.get().getQualifiedUser(),
ConnectContext.get().getRemoteIP(),
dbName + ": " + tableName);
}
result.add(showTableStats(table));
} else {
for (Table table : db.getTables()) {
if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), dbName, table.getName(),
PrivPredicate.SHOW)) {
continue;
}
try {
result.add(showTableStats(table));
} catch (AnalysisException e) {
}
}
}
return result;
}
public List<List<String>> showColumnStatsList(TableName tableName) throws AnalysisException {
Table table = validateTableName(tableName);
if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), tableName.getDb(),
tableName.getTbl(), PrivPredicate.SHOW)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "SHOW CREATE TABLE",
ConnectContext.get().getQualifiedUser(),
ConnectContext.get().getRemoteIP(),
tableName.getDb() + ": " + tableName.getTbl());
}
List<List<String>> result = Lists.newArrayList();
Map<String, ColumnStats> nameToColumnStats = this.statistics.getColumnStats(table.getId());
if (nameToColumnStats == null) {
throw new AnalysisException("There is no column statistics in this table:" + table.getName());
}
for (Map.Entry<String, ColumnStats> entry : nameToColumnStats.entrySet()) {
List<String> row = Lists.newArrayList();
row.add(entry.getKey());
row.addAll(entry.getValue().getShowInfo());
result.add(row);
}
return result;
}
private List<String> showTableStats(Table table) throws AnalysisException {
TableStats tableStats = this.statistics.getTableStats(table.getId());
if (tableStats == null) {
throw new AnalysisException("There is no statistics in this table:" + table.getName());
}
List<String> row = Lists.newArrayList();
row.add(table.getName());
row.addAll(tableStats.getShowInfo());
return row;
}
public void alterTableStatistics(StatisticsTaskResult taskResult) throws AnalysisException {
StatsCategoryDesc categoryDesc = taskResult.getCategoryDesc();
validateTableAndColumn(categoryDesc);
long tblId = categoryDesc.getTableId();
Map<String, String> statsNameToValue = taskResult.getStatsNameToValue();
this.statistics.updateTableStats(tblId, statsNameToValue);
}
public void alterColumnStatistics(StatisticsTaskResult taskResult) throws AnalysisException {
StatsCategoryDesc categoryDesc = taskResult.getCategoryDesc();
validateTableAndColumn(categoryDesc);
long dbId = categoryDesc.getDbId();
long tblId = categoryDesc.getTableId();
Database db = Catalog.getCurrentCatalog().getDbOrAnalysisException(dbId);
Table table = db.getTableOrAnalysisException(tblId);
String columnName = categoryDesc.getColumnName();
Type columnType = table.getColumn(columnName).getType();
Map<String, String> statsNameToValue = taskResult.getStatsNameToValue();
this.statistics.updateColumnStats(tblId, columnName, columnType, statsNameToValue);
}
private Table validateTableName(TableName dbTableName) throws AnalysisException {
String dbName = dbTableName.getDb();
String tableName = dbTableName.getTbl();
Database db = Catalog.getCurrentCatalog().getDbOrAnalysisException(dbName);
return db.getTableOrAnalysisException(tableName);
}
} | class StatisticsManager {
private final static Logger LOG = LogManager.getLogger(StatisticsManager.class);
private Statistics statistics;
public StatisticsManager() {
statistics = new Statistics();
}
public void alterTableStatistics(AlterTableStatsStmt stmt)
throws AnalysisException {
Table table = validateTableName(stmt.getTableName());
statistics.updateTableStats(table.getId(), stmt.getStatsTypeToValue());
}
public void alterColumnStatistics(AlterColumnStatsStmt stmt) throws AnalysisException {
Table table = validateTableName(stmt.getTableName());
String columnName = stmt.getColumnName();
Column column = table.getColumn(columnName);
if (column == null) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_FIELD_ERROR, columnName, table.getName());
}
statistics.updateColumnStats(table.getId(), columnName, column.getType(), stmt.getStatsTypeToValue());
}
public List<List<String>> showTableStatsList(String dbName, String tableName)
throws AnalysisException {
Database db = Catalog.getCurrentCatalog().getDbOrAnalysisException(dbName);
List<List<String>> result = Lists.newArrayList();
if (tableName != null) {
Table table = db.getTableOrAnalysisException(tableName);
if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), dbName, tableName,
PrivPredicate.SHOW)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "SHOW CREATE TABLE",
ConnectContext.get().getQualifiedUser(),
ConnectContext.get().getRemoteIP(),
dbName + ": " + tableName);
}
result.add(showTableStats(table));
} else {
for (Table table : db.getTables()) {
if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), dbName, table.getName(),
PrivPredicate.SHOW)) {
continue;
}
try {
result.add(showTableStats(table));
} catch (AnalysisException e) {
}
}
}
return result;
}
public List<List<String>> showColumnStatsList(TableName tableName) throws AnalysisException {
Table table = validateTableName(tableName);
if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), tableName.getDb(),
tableName.getTbl(), PrivPredicate.SHOW)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "SHOW CREATE TABLE",
ConnectContext.get().getQualifiedUser(),
ConnectContext.get().getRemoteIP(),
tableName.getDb() + ": " + tableName.getTbl());
}
List<List<String>> result = Lists.newArrayList();
Map<String, ColumnStats> nameToColumnStats = statistics.getColumnStats(table.getId());
if (nameToColumnStats == null) {
throw new AnalysisException("There is no column statistics in this table:" + table.getName());
}
for (Map.Entry<String, ColumnStats> entry : nameToColumnStats.entrySet()) {
List<String> row = Lists.newArrayList();
row.add(entry.getKey());
row.addAll(entry.getValue().getShowInfo());
result.add(row);
}
return result;
}
private List<String> showTableStats(Table table) throws AnalysisException {
TableStats tableStats = statistics.getTableStats(table.getId());
if (tableStats == null) {
throw new AnalysisException("There is no statistics in this table:" + table.getName());
}
List<String> row = Lists.newArrayList();
row.add(table.getName());
row.addAll(tableStats.getShowInfo());
return row;
}
public void alterTableStatistics(StatisticsTaskResult taskResult) throws AnalysisException {
StatsCategoryDesc categoryDesc = taskResult.getCategoryDesc();
validateTableAndColumn(categoryDesc);
long tblId = categoryDesc.getTableId();
Map<StatsType, String> statsTypeToValue = taskResult.getStatsTypeToValue();
statistics.updateTableStats(tblId, statsTypeToValue);
}
public void alterColumnStatistics(StatisticsTaskResult taskResult) throws AnalysisException {
StatsCategoryDesc categoryDesc = taskResult.getCategoryDesc();
validateTableAndColumn(categoryDesc);
long dbId = categoryDesc.getDbId();
long tblId = categoryDesc.getTableId();
Database db = Catalog.getCurrentCatalog().getDbOrAnalysisException(dbId);
Table table = db.getTableOrAnalysisException(tblId);
String columnName = categoryDesc.getColumnName();
Type columnType = table.getColumn(columnName).getType();
Map<StatsType, String> statsTypeToValue = taskResult.getStatsTypeToValue();
statistics.updateColumnStats(tblId, columnName, columnType, statsTypeToValue);
}
private Table validateTableName(TableName dbTableName) throws AnalysisException {
String dbName = dbTableName.getDb();
String tableName = dbTableName.getTbl();
Database db = Catalog.getCurrentCatalog().getDbOrAnalysisException(dbName);
return db.getTableOrAnalysisException(tableName);
}
public Statistics getStatistics() {
return statistics;
}
} |
`null` instead of `Optional.empty`? Missing unwrapping of `id`. | void summarize_bill() {
var req = request("/billing/v2/accountant/bill/id-1/summary?keys=plan,architecture")
.roles(Role.hostedAccountant());
tester.assertResponse(req, """
{"id":"BillId{value='id-1'}","summary":[{"key":{"plan":"paid","architecture":"Optional.empty"},"summary":{"cpu":{"cost":"0","hours":"0"},"memory":{"cost":"0","hours":"0"},"disk":{"cost":"0","hours":"0"},"gpu":{"cost":"0","hours":"0"}}}]}""");
} | {"id":"BillId{value='id-1'}","summary":[{"key":{"plan":"paid","architecture":"Optional.empty"},"summary":{"cpu":{"cost":"0","hours":"0"},"memory":{"cost":"0","hours":"0"},"disk":{"cost":"0","hours":"0"},"gpu":{"cost":"0","hours":"0"}}}]}"""); | void summarize_bill() {
var req = request("/billing/v2/accountant/bill/id-1/summary?keys=plan,architecture")
.roles(Role.hostedAccountant());
tester.assertResponse(req, """
{"id":"BillId{value='id-1'}","summary":[{"key":{"plan":"paid","architecture":null},"summary":{"cpu":{"cost":"0","hours":"0"},"memory":{"cost":"0","hours":"0"},"disk":{"cost":"0","hours":"0"},"gpu":{"cost":"0","hours":"0"}}}]}""");
} | class BillingApiHandlerV2Test extends ControllerContainerCloudTest {
private static final String responseFiles = "src/test/java/com/yahoo/vespa/hosted/controller/restapi/billing/responses/";
private static final TenantName tenant = TenantName.from("tenant1");
private static final TenantName tenant2 = TenantName.from("tenant2");
private static final Set<Role> tenantReader = Set.of(Role.reader(tenant));
private static final Set<Role> tenantAdmin = Set.of(Role.administrator(tenant));
private static final Set<Role> financeAdmin = Set.of(Role.hostedAccountant());
private MockBillingController billingController;
private ContainerTester tester;
@BeforeEach
public void before() {
tester = new ContainerTester(container, responseFiles);
tester.controller().tenants().create(new CloudTenantSpec(tenant, ""), new Auth0Credentials(() -> "foo", Set.of(Role.hostedOperator())));
var clock = (ManualClock) tester.controller().serviceRegistry().clock();
clock.setInstant(Instant.parse("2021-04-13T00:00:00Z"));
billingController = (MockBillingController) tester.serviceRegistry().billingController();
billingController.addBill(tenant, createBill(), true);
}
@Override
protected String variablePartXml() {
return " <component id='com.yahoo.vespa.hosted.controller.security.CloudAccessControlRequests'/>\n" +
" <component id='com.yahoo.vespa.hosted.controller.security.CloudAccessControl'/>\n" +
" <handler id='com.yahoo.vespa.hosted.controller.restapi.billing.BillingApiHandlerV2'>\n" +
" <binding>http:
" </handler>\n" +
" <http>\n" +
" <server id='default' port='8080' />\n" +
" <filtering>\n" +
" <request-chain id='default'>\n" +
" <filter id='com.yahoo.vespa.hosted.controller.restapi.filter.ControllerAuthorizationFilter'/>\n" +
" <binding>http:
" </request-chain>\n" +
" </filtering>\n" +
" </http>\n";
}
@Test
void require_tenant_info() {
var request = request("/billing/v2/tenant/" + tenant.value()).roles(tenantReader);
tester.assertResponse(request, "{\"tenant\":\"tenant1\",\"plan\":{\"id\":\"trial\",\"name\":\"Free Trial - for testing purposes\"},\"collection\":\"AUTO\"}");
}
@Test
void require_accountant_for_update_collection() {
var request = request("/billing/v2/tenant/" + tenant.value(), Request.Method.PATCH)
.data("{\"collection\": \"INVOICE\"}");
var forbidden = request.roles(tenantAdmin);
tester.assertResponse(forbidden, """
{
"code" : 403,
"message" : "Access denied"
}""", 403);
var success = request.roles(financeAdmin);
tester.assertResponse(success, """
{"tenant":"tenant1","plan":{"id":"trial","name":"Free Trial - for testing purposes"},"collection":"INVOICE"}""");
}
@Test
void require_tenant_usage() {
var request = request("/billing/v2/tenant/" + tenant + "/usage").roles(tenantReader);
tester.assertResponse(request, "{\"from\":\"2021-04-13\",\"to\":\"2021-04-13\",\"total\":\"0.00\",\"items\":[]}");
}
@Test
void require_tenant_invoice() {
var listRequest = request("/billing/v2/tenant/" + tenant + "/bill").roles(tenantReader);
tester.assertResponse(listRequest, "{\"invoices\":[{\"id\":\"id-1\",\"from\":\"2020-05-23\",\"to\":\"2020-05-28\",\"total\":\"123.00\",\"status\":\"OPEN\"}]}");
var singleRequest = request("/billing/v2/tenant/" + tenant + "/bill/id-1").roles(tenantReader);
tester.assertResponse(singleRequest, """
{"id":"id-1","from":"2020-05-23","to":"2020-05-28","total":"123.00","status":"OPEN","statusHistory":[{"at":"2020-05-23T00:00:00Z","status":"OPEN"}],"items":[{"id":"some-id","description":"description","amount":"123.00","plan":{"id":"paid","name":"Paid Plan - for testing purposes"},"majorVersion":0,"cpu":{},"memory":{},"disk":{},"gpu":{}}]}""");
}
@Test
void require_accountant_summary() {
var tenantRequest = request("/billing/v2/accountant").roles(tenantReader);
tester.assertResponse(tenantRequest, "{\n" +
" \"code\" : 403,\n" +
" \"message\" : \"Access denied\"\n" +
"}", 403);
var accountantRequest = request("/billing/v2/accountant").roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"tenants":[{"tenant":"tenant1","plan":{"id":"trial","name":"Free Trial - for testing purposes"},"quota":{"budget":-1.0},"collection":"AUTO","lastBill":"1970-01-01","unbilled":"0.00"}]}""");
}
@Test
void require_accountant_preview() {
var accountantRequest = request("/billing/v2/accountant/preview").roles(Role.hostedAccountant());
billingController.uncommittedBills.put(tenant, createBill());
tester.assertResponse(accountantRequest, """
{"tenants":[{"tenant":"tenant1","plan":{"id":"trial","name":"Free Trial - for testing purposes"},"quota":{"budget":-1.0},"collection":"AUTO","lastBill":"2020-05-23","unbilled":"123.00"}]}""");
}
@Test
void require_accountant_tenant_preview() {
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/preview").roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, "{\"id\":\"empty\",\"from\":\"2021-04-13\",\"to\":\"2021-04-12\",\"total\":\"0.00\",\"status\":\"OPEN\",\"statusHistory\":[{\"at\":\"2021-04-13T00:00:00Z\",\"status\":\"OPEN\"}],\"items\":[]}");
}
@Test
void require_accountant_tenant_bill() {
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/preview", Request.Method.POST)
.roles(Role.hostedAccountant())
.data("{\"from\": \"2020-05-01\",\"to\": \"2020-06-01\"}");
tester.assertResponse(accountantRequest, "{\"message\":\"Created bill id-123\"}");
}
@Test
void require_list_of_all_plans() {
var accountantRequest = request("/billing/v2/accountant/plans")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, "{\"plans\":[{\"id\":\"trial\",\"name\":\"Free Trial - for testing purposes\"},{\"id\":\"paid\",\"name\":\"Paid Plan - for testing purposes\"},{\"id\":\"none\",\"name\":\"None Plan - for testing purposes\"}]}");
}
@Test
void require_additional_items_empty() {
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/items")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"items":[]}""");
}
@Test
void require_additional_items_with_content() {
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/items", Request.Method.POST)
.roles(Role.hostedAccountant())
.data("""
{
"description": "Additional support costs",
"amount": "123.45"
}""");
tester.assertResponse(accountantRequest, """
{"message":"Added line item for tenant tenant1"}""");
}
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/items")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"items":[{"id":"line-item-id","description":"Additional support costs","amount":"123.45","plan":{"id":"paid","name":"Paid Plan - for testing purposes"},"majorVersion":0,"cpu":{},"memory":{},"disk":{},"gpu":{}}]}""");
}
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/item/line-item-id", Request.Method.DELETE)
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"message":"Successfully deleted line item line-item-id"}""");
}
}
@Test
void require_current_plan() {
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/plan")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"id":"trial","name":"Free Trial - for testing purposes"}""");
}
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/plan", Request.Method.POST)
.roles(Role.hostedAccountant())
.data("""
{"id": "paid"}""");
tester.assertResponse(accountantRequest, """
{"message":"Plan: paid"}""");
}
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/plan")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"id":"paid","name":"Paid Plan - for testing purposes"}""");
}
}
@Test
void require_current_collection() {
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/collection")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"collection":"AUTO"}""");
}
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/collection", Request.Method.POST)
.roles(Role.hostedAccountant())
.data("""
{"collection": "INVOICE"}""");
tester.assertResponse(accountantRequest, """
{"message":"Collection: INVOICE"}""");
}
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/collection")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"collection":"INVOICE"}""");
}
}
@Test
void require_accountant_tenant() {
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"tenant":"tenant1","plan":{"id":"trial","name":"Free Trial - for testing purposes","billed":false,"supported":false},"billing":{},"collection":"AUTO"}""");
}
@Test
void lists_accepted_countries() {
var req = request("/billing/v2/countries").roles(tenantReader);
tester.assertJsonResponse(req, new File("accepted-countries.json"));
}
@Test
private static Bill createBill() {
var start = LocalDate.of(2020, 5, 23).atStartOfDay(ZoneOffset.UTC);
var end = start.toLocalDate().plusDays(6).atStartOfDay(ZoneOffset.UTC);
var statusHistory = new StatusHistory(new TreeMap<>(Map.of(start, BillStatus.OPEN)));
return new Bill(
Bill.Id.of("id-1"),
TenantName.defaultName(),
statusHistory,
List.of(createLineItem(start)),
start,
end
);
}
static Bill.LineItem createLineItem(ZonedDateTime addedAt) {
return new Bill.LineItem(
"some-id",
"description",
new BigDecimal("123.00"),
"paid",
"Smith",
addedAt
);
}
} | class BillingApiHandlerV2Test extends ControllerContainerCloudTest {
private static final String responseFiles = "src/test/java/com/yahoo/vespa/hosted/controller/restapi/billing/responses/";
private static final TenantName tenant = TenantName.from("tenant1");
private static final TenantName tenant2 = TenantName.from("tenant2");
private static final Set<Role> tenantReader = Set.of(Role.reader(tenant));
private static final Set<Role> tenantAdmin = Set.of(Role.administrator(tenant));
private static final Set<Role> financeAdmin = Set.of(Role.hostedAccountant());
private MockBillingController billingController;
private ContainerTester tester;
@BeforeEach
public void before() {
tester = new ContainerTester(container, responseFiles);
tester.controller().tenants().create(new CloudTenantSpec(tenant, ""), new Auth0Credentials(() -> "foo", Set.of(Role.hostedOperator())));
var clock = (ManualClock) tester.controller().serviceRegistry().clock();
clock.setInstant(Instant.parse("2021-04-13T00:00:00Z"));
billingController = (MockBillingController) tester.serviceRegistry().billingController();
billingController.addBill(tenant, createBill(), true);
}
@Override
protected String variablePartXml() {
return " <component id='com.yahoo.vespa.hosted.controller.security.CloudAccessControlRequests'/>\n" +
" <component id='com.yahoo.vespa.hosted.controller.security.CloudAccessControl'/>\n" +
" <handler id='com.yahoo.vespa.hosted.controller.restapi.billing.BillingApiHandlerV2'>\n" +
" <binding>http:
" </handler>\n" +
" <http>\n" +
" <server id='default' port='8080' />\n" +
" <filtering>\n" +
" <request-chain id='default'>\n" +
" <filter id='com.yahoo.vespa.hosted.controller.restapi.filter.ControllerAuthorizationFilter'/>\n" +
" <binding>http:
" </request-chain>\n" +
" </filtering>\n" +
" </http>\n";
}
@Test
void require_tenant_info() {
var request = request("/billing/v2/tenant/" + tenant.value()).roles(tenantReader);
tester.assertResponse(request, "{\"tenant\":\"tenant1\",\"plan\":{\"id\":\"trial\",\"name\":\"Free Trial - for testing purposes\"},\"collection\":\"AUTO\"}");
}
@Test
void require_accountant_for_update_collection() {
var request = request("/billing/v2/tenant/" + tenant.value(), Request.Method.PATCH)
.data("{\"collection\": \"INVOICE\"}");
var forbidden = request.roles(tenantAdmin);
tester.assertResponse(forbidden, """
{
"code" : 403,
"message" : "Access denied"
}""", 403);
var success = request.roles(financeAdmin);
tester.assertResponse(success, """
{"tenant":"tenant1","plan":{"id":"trial","name":"Free Trial - for testing purposes"},"collection":"INVOICE"}""");
}
@Test
void require_tenant_usage() {
var request = request("/billing/v2/tenant/" + tenant + "/usage").roles(tenantReader);
tester.assertResponse(request, "{\"from\":\"2021-04-13\",\"to\":\"2021-04-13\",\"total\":\"0.00\",\"items\":[]}");
}
@Test
void require_tenant_invoice() {
var listRequest = request("/billing/v2/tenant/" + tenant + "/bill").roles(tenantReader);
tester.assertResponse(listRequest, "{\"invoices\":[{\"id\":\"id-1\",\"from\":\"2020-05-23\",\"to\":\"2020-05-28\",\"total\":\"123.00\",\"status\":\"OPEN\"}]}");
var singleRequest = request("/billing/v2/tenant/" + tenant + "/bill/id-1").roles(tenantReader);
tester.assertResponse(singleRequest, """
{"id":"id-1","from":"2020-05-23","to":"2020-05-28","total":"123.00","status":"OPEN","statusHistory":[{"at":"2020-05-23T00:00:00Z","status":"OPEN"}],"items":[{"id":"some-id","description":"description","amount":"123.00","plan":{"id":"paid","name":"Paid Plan - for testing purposes"},"majorVersion":0,"cpu":{},"memory":{},"disk":{},"gpu":{}}]}""");
}
@Test
void require_accountant_summary() {
var tenantRequest = request("/billing/v2/accountant").roles(tenantReader);
tester.assertResponse(tenantRequest, "{\n" +
" \"code\" : 403,\n" +
" \"message\" : \"Access denied\"\n" +
"}", 403);
var accountantRequest = request("/billing/v2/accountant").roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"tenants":[{"tenant":"tenant1","plan":{"id":"trial","name":"Free Trial - for testing purposes"},"quota":{"budget":-1.0},"collection":"AUTO","lastBill":"1970-01-01","unbilled":"0.00"}]}""");
}
@Test
void require_accountant_preview() {
var accountantRequest = request("/billing/v2/accountant/preview").roles(Role.hostedAccountant());
billingController.uncommittedBills.put(tenant, createBill());
tester.assertResponse(accountantRequest, """
{"tenants":[{"tenant":"tenant1","plan":{"id":"trial","name":"Free Trial - for testing purposes"},"quota":{"budget":-1.0},"collection":"AUTO","lastBill":"2020-05-23","unbilled":"123.00"}]}""");
}
@Test
void require_accountant_tenant_preview() {
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/preview").roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, "{\"id\":\"empty\",\"from\":\"2021-04-13\",\"to\":\"2021-04-12\",\"total\":\"0.00\",\"status\":\"OPEN\",\"statusHistory\":[{\"at\":\"2021-04-13T00:00:00Z\",\"status\":\"OPEN\"}],\"items\":[]}");
}
@Test
void require_accountant_tenant_bill() {
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/preview", Request.Method.POST)
.roles(Role.hostedAccountant())
.data("{\"from\": \"2020-05-01\",\"to\": \"2020-06-01\"}");
tester.assertResponse(accountantRequest, "{\"message\":\"Created bill id-123\"}");
}
@Test
void require_list_of_all_plans() {
var accountantRequest = request("/billing/v2/accountant/plans")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, "{\"plans\":[{\"id\":\"trial\",\"name\":\"Free Trial - for testing purposes\"},{\"id\":\"paid\",\"name\":\"Paid Plan - for testing purposes\"},{\"id\":\"none\",\"name\":\"None Plan - for testing purposes\"}]}");
}
@Test
void require_additional_items_empty() {
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/items")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"items":[]}""");
}
@Test
void require_additional_items_with_content() {
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/items", Request.Method.POST)
.roles(Role.hostedAccountant())
.data("""
{
"description": "Additional support costs",
"amount": "123.45"
}""");
tester.assertResponse(accountantRequest, """
{"message":"Added line item for tenant tenant1"}""");
}
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/items")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"items":[{"id":"line-item-id","description":"Additional support costs","amount":"123.45","plan":{"id":"paid","name":"Paid Plan - for testing purposes"},"majorVersion":0,"cpu":{},"memory":{},"disk":{},"gpu":{}}]}""");
}
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/item/line-item-id", Request.Method.DELETE)
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"message":"Successfully deleted line item line-item-id"}""");
}
}
@Test
void require_current_plan() {
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/plan")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"id":"trial","name":"Free Trial - for testing purposes"}""");
}
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/plan", Request.Method.POST)
.roles(Role.hostedAccountant())
.data("""
{"id": "paid"}""");
tester.assertResponse(accountantRequest, """
{"message":"Plan: paid"}""");
}
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/plan")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"id":"paid","name":"Paid Plan - for testing purposes"}""");
}
}
@Test
void require_current_collection() {
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/collection")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"collection":"AUTO"}""");
}
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/collection", Request.Method.POST)
.roles(Role.hostedAccountant())
.data("""
{"collection": "INVOICE"}""");
tester.assertResponse(accountantRequest, """
{"message":"Collection: INVOICE"}""");
}
{
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1/collection")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"collection":"INVOICE"}""");
}
}
@Test
void require_accountant_tenant() {
var accountantRequest = request("/billing/v2/accountant/tenant/tenant1")
.roles(Role.hostedAccountant());
tester.assertResponse(accountantRequest, """
{"tenant":"tenant1","plan":{"id":"trial","name":"Free Trial - for testing purposes","billed":false,"supported":false},"billing":{},"collection":"AUTO"}""");
}
@Test
void lists_accepted_countries() {
var req = request("/billing/v2/countries").roles(tenantReader);
tester.assertJsonResponse(req, new File("accepted-countries.json"));
}
    private static Bill createBill() {
var start = LocalDate.of(2020, 5, 23).atStartOfDay(ZoneOffset.UTC);
var end = start.toLocalDate().plusDays(6).atStartOfDay(ZoneOffset.UTC);
var statusHistory = new StatusHistory(new TreeMap<>(Map.of(start, BillStatus.OPEN)));
return new Bill(
Bill.Id.of("id-1"),
TenantName.defaultName(),
statusHistory,
List.of(createLineItem(start)),
start,
end
);
}
static Bill.LineItem createLineItem(ZonedDateTime addedAt) {
return new Bill.LineItem(
"some-id",
"description",
new BigDecimal("123.00"),
"paid",
"Smith",
addedAt
);
}
} |
Will handle this separately after clarifying. https://github.com/ballerina-platform/ballerina-lang/issues/29758 will be used to track this | public void setForeachTypedBindingPatternType(BLangForeach foreachNode) {
BType collectionType = foreachNode.collection.type;
BType varType;
switch (collectionType.tag) {
case TypeTags.STRING:
varType = symTable.stringType;
break;
case TypeTags.ARRAY:
BArrayType arrayType = (BArrayType) collectionType;
varType = arrayType.eType;
break;
case TypeTags.TUPLE:
BTupleType tupleType = (BTupleType) collectionType;
LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);
if (tupleType.restType != null) {
tupleTypes.add(tupleType.restType);
}
varType = tupleTypes.size() == 1 ?
tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);
break;
case TypeTags.MAP:
BMapType bMapType = (BMapType) collectionType;
varType = bMapType.constraint;
break;
case TypeTags.RECORD:
BRecordType recordType = (BRecordType) collectionType;
varType = inferRecordFieldType(recordType);
break;
case TypeTags.XML:
BType constraint = ((BXMLType) collectionType).constraint;
while (constraint.tag == TypeTags.XML) {
collectionType = constraint;
constraint = ((BXMLType) collectionType).constraint;
}
switch (constraint.tag) {
case TypeTags.XML_ELEMENT:
varType = symTable.xmlElementType;
break;
case TypeTags.XML_COMMENT:
varType = symTable.xmlCommentType;
break;
case TypeTags.XML_TEXT:
varType = symTable.xmlTextType;
break;
case TypeTags.XML_PI:
varType = symTable.xmlPIType;
break;
case TypeTags.NEVER:
varType = symTable.neverType;
break;
default:
Set<BType> collectionTypes = getEffectiveMemberTypes((BUnionType) constraint);
Set<BType> builtinXMLConstraintTypes = getEffectiveMemberTypes
((BUnionType) ((BXMLType) symTable.xmlType).constraint);
if (collectionTypes.size() == 4 && builtinXMLConstraintTypes.equals(collectionTypes)) {
varType = symTable.xmlType;
} else {
LinkedHashSet<BType> collectionTypesInSymTable = new LinkedHashSet<>();
for (BType subType : collectionTypes) {
switch (subType.tag) {
case TypeTags.XML_ELEMENT:
collectionTypesInSymTable.add(symTable.xmlElementType);
break;
case TypeTags.XML_COMMENT:
collectionTypesInSymTable.add(symTable.xmlCommentType);
break;
case TypeTags.XML_TEXT:
collectionTypesInSymTable.add(symTable.xmlTextType);
break;
case TypeTags.XML_PI:
collectionTypesInSymTable.add(symTable.xmlPIType);
break;
}
}
varType = BUnionType.create(null, collectionTypesInSymTable);
}
}
break;
case TypeTags.XML_TEXT:
varType = symTable.xmlTextType;
break;
case TypeTags.TABLE:
BTableType tableType = (BTableType) collectionType;
varType = tableType.constraint;
break;
case TypeTags.STREAM:
BStreamType streamType = (BStreamType) collectionType;
if (streamType.constraint.tag == TypeTags.NONE) {
varType = symTable.anydataType;
break;
}
varType = streamType.constraint;
List<BType> completionType = getAllTypes(streamType.error);
if (completionType.stream().anyMatch(type -> type.tag != TypeTags.NIL)) {
BType actualType = BUnionType.create(null, varType, streamType.error);
dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
varType, actualType);
}
break;
case TypeTags.OBJECT:
BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);
if (nextMethodReturnType != null) {
foreachNode.resultType = getRecordType(nextMethodReturnType);
BType valueType = (foreachNode.resultType != null)
? ((BRecordType) foreachNode.resultType).fields.get("value").type : null;
BType errorType = getErrorType(nextMethodReturnType);
if (errorType != null) {
BType actualType = BUnionType.create(null, valueType, errorType);
dlog.error(foreachNode.collection.pos,
DiagnosticErrorCode.INVALID_ITERABLE_COMPLETION_TYPE_IN_FOREACH_NEXT_FUNCTION,
actualType, errorType);
}
foreachNode.nillableResultType = nextMethodReturnType;
foreachNode.varType = valueType;
return;
}
case TypeTags.SEMANTIC_ERROR:
foreachNode.varType = symTable.semanticError;
foreachNode.resultType = symTable.semanticError;
foreachNode.nillableResultType = symTable.semanticError;
return;
default:
foreachNode.varType = symTable.semanticError;
foreachNode.resultType = symTable.semanticError;
foreachNode.nillableResultType = symTable.semanticError;
dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.ITERABLE_NOT_SUPPORTED_COLLECTION,
collectionType);
return;
}
BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,
names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
BUnionType nextMethodReturnType =
(BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);
foreachNode.varType = varType;
foreachNode.resultType = getRecordType(nextMethodReturnType);
foreachNode.nillableResultType = nextMethodReturnType;
} | if (completionType.stream().anyMatch(type -> type.tag != TypeTags.NIL)) { | public void setForeachTypedBindingPatternType(BLangForeach foreachNode) {
BType collectionType = foreachNode.collection.type;
BType varType;
switch (collectionType.tag) {
case TypeTags.STRING:
varType = symTable.stringType;
break;
case TypeTags.ARRAY:
BArrayType arrayType = (BArrayType) collectionType;
varType = arrayType.eType;
break;
case TypeTags.TUPLE:
BTupleType tupleType = (BTupleType) collectionType;
LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);
if (tupleType.restType != null) {
tupleTypes.add(tupleType.restType);
}
varType = tupleTypes.size() == 1 ?
tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);
break;
case TypeTags.MAP:
BMapType bMapType = (BMapType) collectionType;
varType = bMapType.constraint;
break;
case TypeTags.RECORD:
BRecordType recordType = (BRecordType) collectionType;
varType = inferRecordFieldType(recordType);
break;
case TypeTags.XML:
BType constraint = ((BXMLType) collectionType).constraint;
while (constraint.tag == TypeTags.XML) {
collectionType = constraint;
constraint = ((BXMLType) collectionType).constraint;
}
switch (constraint.tag) {
case TypeTags.XML_ELEMENT:
varType = symTable.xmlElementType;
break;
case TypeTags.XML_COMMENT:
varType = symTable.xmlCommentType;
break;
case TypeTags.XML_TEXT:
varType = symTable.xmlTextType;
break;
case TypeTags.XML_PI:
varType = symTable.xmlPIType;
break;
case TypeTags.NEVER:
varType = symTable.neverType;
break;
default:
Set<BType> collectionTypes = getEffectiveMemberTypes((BUnionType) constraint);
Set<BType> builtinXMLConstraintTypes = getEffectiveMemberTypes
((BUnionType) ((BXMLType) symTable.xmlType).constraint);
if (collectionTypes.size() == 4 && builtinXMLConstraintTypes.equals(collectionTypes)) {
varType = symTable.xmlType;
} else {
LinkedHashSet<BType> collectionTypesInSymTable = new LinkedHashSet<>();
for (BType subType : collectionTypes) {
switch (subType.tag) {
case TypeTags.XML_ELEMENT:
collectionTypesInSymTable.add(symTable.xmlElementType);
break;
case TypeTags.XML_COMMENT:
collectionTypesInSymTable.add(symTable.xmlCommentType);
break;
case TypeTags.XML_TEXT:
collectionTypesInSymTable.add(symTable.xmlTextType);
break;
case TypeTags.XML_PI:
collectionTypesInSymTable.add(symTable.xmlPIType);
break;
}
}
varType = BUnionType.create(null, collectionTypesInSymTable);
}
}
break;
case TypeTags.XML_TEXT:
varType = symTable.xmlTextType;
break;
case TypeTags.TABLE:
BTableType tableType = (BTableType) collectionType;
varType = tableType.constraint;
break;
case TypeTags.STREAM:
BStreamType streamType = (BStreamType) collectionType;
if (streamType.constraint.tag == TypeTags.NONE) {
varType = symTable.anydataType;
break;
}
varType = streamType.constraint;
List<BType> completionType = getAllTypes(streamType.error);
if (completionType.stream().anyMatch(type -> type.tag != TypeTags.NIL)) {
BType actualType = BUnionType.create(null, varType, streamType.error);
dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
varType, actualType);
}
break;
case TypeTags.OBJECT:
BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);
if (nextMethodReturnType != null) {
foreachNode.resultType = getRecordType(nextMethodReturnType);
BType valueType = (foreachNode.resultType != null)
? ((BRecordType) foreachNode.resultType).fields.get("value").type : null;
BType errorType = getErrorType(nextMethodReturnType);
if (errorType != null) {
BType actualType = BUnionType.create(null, valueType, errorType);
dlog.error(foreachNode.collection.pos,
DiagnosticErrorCode.INVALID_ITERABLE_COMPLETION_TYPE_IN_FOREACH_NEXT_FUNCTION,
actualType, errorType);
}
foreachNode.nillableResultType = nextMethodReturnType;
foreachNode.varType = valueType;
return;
}
case TypeTags.SEMANTIC_ERROR:
foreachNode.varType = symTable.semanticError;
foreachNode.resultType = symTable.semanticError;
foreachNode.nillableResultType = symTable.semanticError;
return;
default:
foreachNode.varType = symTable.semanticError;
foreachNode.resultType = symTable.semanticError;
foreachNode.nillableResultType = symTable.semanticError;
dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.ITERABLE_NOT_SUPPORTED_COLLECTION,
collectionType);
return;
}
BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,
names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
BUnionType nextMethodReturnType =
(BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);
foreachNode.varType = varType;
foreachNode.resultType = getRecordType(nextMethodReturnType);
foreachNode.nillableResultType = nextMethodReturnType;
} | class Types {
private static final CompilerContext.Key<Types> TYPES_KEY =
new CompilerContext.Key<>();
private final Unifier unifier;
private SymbolTable symTable;
private SymbolResolver symResolver;
private BLangDiagnosticLog dlog;
private Names names;
private int finiteTypeCount = 0;
private BUnionType expandedXMLBuiltinSubtypes;
private final BLangAnonymousModelHelper anonymousModelHelper;
private int recordCount = 0;
private SymbolEnv env;
private boolean inOrderedType;
public static Types getInstance(CompilerContext context) {
Types types = context.get(TYPES_KEY);
if (types == null) {
types = new Types(context);
}
return types;
}
public Types(CompilerContext context) {
context.put(TYPES_KEY, this);
this.symTable = SymbolTable.getInstance(context);
this.symResolver = SymbolResolver.getInstance(context);
this.dlog = BLangDiagnosticLog.getInstance(context);
this.names = Names.getInstance(context);
this.expandedXMLBuiltinSubtypes = BUnionType.create(null,
symTable.xmlElementType, symTable.xmlCommentType,
symTable.xmlPIType, symTable.xmlTextType);
this.unifier = new Unifier();
this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
}
public List<BType> checkTypes(BLangExpression node,
List<BType> actualTypes,
List<BType> expTypes) {
List<BType> resTypes = new ArrayList<>();
for (int i = 0; i < actualTypes.size(); i++) {
resTypes.add(checkType(node, actualTypes.get(i), expTypes.size() > i ? expTypes.get(i) : symTable.noType));
}
return resTypes;
}
public BType checkType(BLangExpression node,
BType actualType,
BType expType) {
return checkType(node, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
public BType checkType(BLangExpression expr,
BType actualType,
BType expType,
DiagnosticCode diagCode) {
expr.type = checkType(expr.pos, actualType, expType, diagCode);
if (expr.type.tag == TypeTags.SEMANTIC_ERROR) {
return expr.type;
}
setImplicitCastExpr(expr, actualType, expType);
return expr.type;
}
public BType checkType(Location pos,
BType actualType,
BType expType,
DiagnosticCode diagCode) {
if (expType.tag == TypeTags.SEMANTIC_ERROR) {
return expType;
} else if (expType.tag == TypeTags.NONE) {
return actualType;
} else if (actualType.tag == TypeTags.SEMANTIC_ERROR) {
return actualType;
} else if (isAssignable(actualType, expType)) {
return actualType;
}
dlog.error(pos, diagCode, expType, actualType);
return symTable.semanticError;
}
    public boolean isLax(BType type) {
        return isLaxType(type, new HashSet<>()) == 1;
    }
public int isLaxType(BType type, Set<BType> visited) {
if (!visited.add(type)) {
return -1;
}
switch (type.tag) {
case TypeTags.JSON:
case TypeTags.XML:
case TypeTags.XML_ELEMENT:
return 1;
case TypeTags.MAP:
return isLaxType(((BMapType) type).constraint, visited);
case TypeTags.UNION:
if (isSameType(type, symTable.jsonType)) {
visited.add(type);
return 1;
}
boolean atleastOneLaxType = false;
for (BType member : ((BUnionType) type).getMemberTypes()) {
int result = isLaxType(member, visited);
if (result == -1) {
continue;
}
if (result == 0) {
return 0;
}
atleastOneLaxType = true;
}
return atleastOneLaxType ? 1 : 0;
}
return 0;
}
public boolean isLaxType(BType type, Map<BType, Boolean> visited) {
if (visited.containsKey(type)) {
return visited.get(type);
}
switch (type.tag) {
case TypeTags.JSON:
case TypeTags.XML:
case TypeTags.XML_ELEMENT:
visited.put(type, true);
return true;
case TypeTags.MAP:
boolean result = isLaxType(((BMapType) type).constraint, visited);
visited.put(type, result);
return result;
case TypeTags.UNION:
if (type == symTable.jsonType || isSameType(type, symTable.jsonType)) {
visited.put(type, true);
return true;
}
for (BType member : ((BUnionType) type).getMemberTypes()) {
if (!isLaxType(member, visited)) {
visited.put(type, false);
return false;
}
}
visited.put(type, true);
return true;
}
visited.put(type, false);
return false;
}
public boolean isSameType(BType source, BType target) {
return isSameType(source, target, new HashSet<>());
}
public boolean isSameOrderedType(BType source, BType target) {
this.inOrderedType = true;
return isSameType(source, target);
}
public boolean isPureType(BType type) {
IsPureTypeUniqueVisitor visitor = new IsPureTypeUniqueVisitor();
return visitor.visit(type);
}
public boolean isAnydata(BType type) {
IsAnydataUniqueVisitor visitor = new IsAnydataUniqueVisitor();
return visitor.visit(type);
}
private boolean isSameType(BType source, BType target, Set<TypePair> unresolvedTypes) {
TypePair pair = new TypePair(source, target);
if (unresolvedTypes.contains(pair)) {
return true;
}
unresolvedTypes.add(pair);
BTypeVisitor<BType, Boolean> sameTypeVisitor = new BSameTypeVisitor(unresolvedTypes);
return target.accept(sameTypeVisitor, source);
}
public boolean isValueType(BType type) {
switch (type.tag) {
case TypeTags.BOOLEAN:
case TypeTags.BYTE:
case TypeTags.DECIMAL:
case TypeTags.FLOAT:
case TypeTags.INT:
case TypeTags.STRING:
case TypeTags.SIGNED32_INT:
case TypeTags.SIGNED16_INT:
case TypeTags.SIGNED8_INT:
case TypeTags.UNSIGNED32_INT:
case TypeTags.UNSIGNED16_INT:
case TypeTags.UNSIGNED8_INT:
case TypeTags.CHAR_STRING:
return true;
default:
return false;
}
}
boolean isBasicNumericType(BType type) {
return type.tag < TypeTags.STRING || TypeTags.isIntegerTypeTag(type.tag);
}
boolean finiteTypeContainsNumericTypeValues(BFiniteType finiteType) {
return finiteType.getValueSpace().stream().anyMatch(valueExpr -> isBasicNumericType(valueExpr.type));
}
public boolean containsErrorType(BType type) {
if (type.tag == TypeTags.UNION) {
return ((BUnionType) type).getMemberTypes().stream()
.anyMatch(this::containsErrorType);
}
if (type.tag == TypeTags.READONLY) {
return true;
}
return type.tag == TypeTags.ERROR;
}
public boolean isSubTypeOfList(BType type) {
if (type.tag != TypeTags.UNION) {
return isSubTypeOfBaseType(type, TypeTags.ARRAY) || isSubTypeOfBaseType(type, TypeTags.TUPLE);
}
return ((BUnionType) type).getMemberTypes().stream().allMatch(this::isSubTypeOfList);
}
BType resolvePatternTypeFromMatchExpr(BLangErrorBindingPattern errorBindingPattern, BLangExpression matchExpr,
SymbolEnv env) {
if (matchExpr == null) {
return errorBindingPattern.type;
}
BType intersectionType = getTypeIntersection(
IntersectionContext.compilerInternalIntersectionContext(),
matchExpr.type, errorBindingPattern.type, env);
if (intersectionType == symTable.semanticError) {
return symTable.noType;
}
return intersectionType;
}
public BType resolvePatternTypeFromMatchExpr(BLangListBindingPattern listBindingPattern,
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern,
SymbolEnv env) {
BTupleType listBindingPatternType = (BTupleType) listBindingPattern.type;
if (varBindingPatternMatchPattern.matchExpr == null) {
return listBindingPatternType;
}
BType matchExprType = varBindingPatternMatchPattern.matchExpr.type;
BType intersectionType = getTypeIntersection(
IntersectionContext.compilerInternalIntersectionContext(),
matchExprType, listBindingPatternType, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
if (matchExprType.tag == TypeTags.ANYDATA) {
Collections.fill(listBindingPatternType.tupleTypes, symTable.anydataType);
if (listBindingPatternType.restType != null) {
listBindingPatternType.restType = symTable.anydataType;
}
return listBindingPatternType;
}
return symTable.noType;
}
public BType resolvePatternTypeFromMatchExpr(BLangListMatchPattern listMatchPattern,
BTupleType listMatchPatternType, SymbolEnv env) {
if (listMatchPattern.matchExpr == null) {
return listMatchPatternType;
}
BType matchExprType = listMatchPattern.matchExpr.type;
BType intersectionType = getTypeIntersection(
IntersectionContext.compilerInternalIntersectionContext(),
matchExprType, listMatchPatternType, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
if (matchExprType.tag == TypeTags.ANYDATA) {
Collections.fill(listMatchPatternType.tupleTypes, symTable.anydataType);
if (listMatchPatternType.restType != null) {
listMatchPatternType.restType = symTable.anydataType;
}
return listMatchPatternType;
}
return symTable.noType;
}
BType resolvePatternTypeFromMatchExpr(BLangErrorMatchPattern errorMatchPattern, BLangExpression matchExpr) {
if (matchExpr == null) {
return errorMatchPattern.type;
}
BType matchExprType = matchExpr.type;
BType patternType = errorMatchPattern.type;
if (isAssignable(matchExprType, patternType)) {
return matchExprType;
}
if (isAssignable(patternType, matchExprType)) {
return patternType;
}
return symTable.noType;
}
BType resolvePatternTypeFromMatchExpr(BLangConstPattern constPattern, BLangExpression constPatternExpr) {
if (constPattern.matchExpr == null) {
if (constPatternExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
return ((BLangSimpleVarRef) constPatternExpr).symbol.type;
} else {
return constPatternExpr.type;
}
}
BType matchExprType = constPattern.matchExpr.type;
BType constMatchPatternExprType = constPatternExpr.type;
if (constPatternExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangSimpleVarRef constVarRef = (BLangSimpleVarRef) constPatternExpr;
BType constVarRefSymbolType = constVarRef.symbol.type;
if (isAssignable(constVarRefSymbolType, matchExprType)) {
return constVarRefSymbolType;
}
return symTable.noType;
}
BLangLiteral constPatternLiteral = (BLangLiteral) constPatternExpr;
if (containsAnyType(constMatchPatternExprType)) {
return matchExprType;
} else if (containsAnyType(matchExprType)) {
return constMatchPatternExprType;
}
if (matchExprType.tag == TypeTags.BYTE && constMatchPatternExprType.tag == TypeTags.INT) {
return matchExprType;
}
if (isAssignable(constMatchPatternExprType, matchExprType)) {
return constMatchPatternExprType;
}
if (matchExprType.tag == TypeTags.UNION) {
for (BType memberType : ((BUnionType) matchExprType).getMemberTypes()) {
if (memberType.tag == TypeTags.FINITE) {
if (isAssignableToFiniteType(memberType, constPatternLiteral)) {
return memberType;
}
} else {
if (isAssignable(constMatchPatternExprType, matchExprType)) {
return constMatchPatternExprType;
}
}
}
} else if (matchExprType.tag == TypeTags.FINITE) {
if (isAssignableToFiniteType(matchExprType, constPatternLiteral)) {
return matchExprType;
}
}
return symTable.noType;
}
BType resolvePatternTypeFromMatchExpr(BLangMappingMatchPattern mappingMatchPattern, BType patternType,
SymbolEnv env) {
if (mappingMatchPattern.matchExpr == null) {
return patternType;
}
BType intersectionType = getTypeIntersection(
IntersectionContext.compilerInternalIntersectionContext(),
mappingMatchPattern.matchExpr.type, patternType, env);
if (intersectionType == symTable.semanticError) {
return symTable.noType;
}
return intersectionType;
}
public BType resolvePatternTypeFromMatchExpr(BLangMappingBindingPattern mappingBindingPattern,
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern,
SymbolEnv env) {
BRecordType mappingBindingPatternType = (BRecordType) mappingBindingPattern.type;
if (varBindingPatternMatchPattern.matchExpr == null) {
return mappingBindingPatternType;
}
BType intersectionType = getTypeIntersection(
IntersectionContext.compilerInternalIntersectionContext(),
varBindingPatternMatchPattern.matchExpr.type,
mappingBindingPatternType, env);
if (intersectionType == symTable.semanticError) {
return symTable.noType;
}
return intersectionType;
}
private boolean containsAnyType(BType type) {
if (type.tag != TypeTags.UNION) {
return type.tag == TypeTags.ANY;
}
for (BType memberTypes : ((BUnionType) type).getMemberTypes()) {
if (memberTypes.tag == TypeTags.ANY) {
return true;
}
}
return false;
}
private boolean containsAnyDataType(BType type) {
if (type.tag != TypeTags.UNION) {
return type.tag == TypeTags.ANYDATA;
}
for (BType memberTypes : ((BUnionType) type).getMemberTypes()) {
if (memberTypes.tag == TypeTags.ANYDATA) {
return true;
}
}
return false;
}
BType mergeTypes(BType typeFirst, BType typeSecond) {
if (containsAnyType(typeFirst) && !containsErrorType(typeSecond)) {
return typeSecond;
}
if (containsAnyType(typeSecond) && !containsErrorType(typeFirst)) {
return typeFirst;
}
if (containsAnyDataType(typeFirst) && !containsErrorType(typeSecond)) {
return typeSecond;
}
if (containsAnyDataType(typeSecond) && !containsErrorType(typeFirst)) {
return typeFirst;
}
if (isSameBasicType(typeFirst, typeSecond)) {
return typeFirst;
}
return BUnionType.create(null, typeFirst, typeSecond);
}
public boolean isSubTypeOfMapping(BType type) {
if (type.tag != TypeTags.UNION) {
return isSubTypeOfBaseType(type, TypeTags.MAP) || isSubTypeOfBaseType(type, TypeTags.RECORD);
}
return ((BUnionType) type).getMemberTypes().stream().allMatch(this::isSubTypeOfMapping);
}
public boolean isSubTypeOfBaseType(BType type, int baseTypeTag) {
if (type.tag != TypeTags.UNION) {
return type.tag == baseTypeTag || (baseTypeTag == TypeTags.TUPLE && type.tag == TypeTags.ARRAY)
|| (baseTypeTag == TypeTags.ARRAY && type.tag == TypeTags.TUPLE);
}
if (TypeTags.isXMLTypeTag(baseTypeTag)) {
return true;
}
return isUnionMemberTypesSubTypeOfBaseType(((BUnionType) type).getMemberTypes(), baseTypeTag);
}
private boolean isUnionMemberTypesSubTypeOfBaseType(LinkedHashSet<BType> memberTypes, int baseTypeTag) {
for (BType type : memberTypes) {
if (!isSubTypeOfBaseType(type, baseTypeTag)) {
return false;
}
}
return true;
}
/**
* Checks whether source type is assignable to the target type.
* <p>
* Source type is assignable to the target type if,
* 1) the target type is any and the source type is not a value type.
* 2) there exists an implicit cast symbol from source to target.
* 3) both types are JSON and the target constraint is no type.
* 4) both types are array type and both array types are assignable.
* 5) both types are MAP and the target constraint is any type or constraints are structurally equivalent.
*
* @param source type.
* @param target type.
* @return true if source type is assignable to the target type.
*/
public boolean isAssignable(BType source, BType target) {
return isAssignable(source, target, new HashSet<>());
}
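    // Illustrative calls, assuming the usual SymbolTable singletons (see the rules documented above):
    //   isAssignable(symTable.nilType, symTable.jsonType)  -> true  (nil is a member of json)
    //   isAssignable(symTable.byteType, symTable.intType)  -> true  (byte widens to int)
    //   isAssignable(symTable.errorType, symTable.anyType) -> false (error is never assignable to any)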
private boolean isAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
if (isSameType(source, target)) {
return true;
}
int sourceTag = source.tag;
int targetTag = target.tag;
if (!Symbols.isFlagOn(source.flags, Flags.PARAMETERIZED) &&
!isInherentlyImmutableType(target) && Symbols.isFlagOn(target.flags, Flags.READONLY) &&
!isInherentlyImmutableType(source) && isMutable(source)) {
return false;
}
if (sourceTag == TypeTags.INTERSECTION) {
return isAssignable(((BIntersectionType) source).effectiveType,
targetTag != TypeTags.INTERSECTION ? target :
((BIntersectionType) target).effectiveType, unresolvedTypes);
}
if (targetTag == TypeTags.INTERSECTION) {
return isAssignable(source, ((BIntersectionType) target).effectiveType, unresolvedTypes);
}
if (sourceTag == TypeTags.PARAMETERIZED_TYPE) {
return isParameterizedTypeAssignable(source, target, unresolvedTypes);
}
if (sourceTag == TypeTags.BYTE && targetTag == TypeTags.INT) {
return true;
}
if (TypeTags.isXMLTypeTag(sourceTag) && TypeTags.isXMLTypeTag(targetTag)) {
return isXMLTypeAssignable(source, target, unresolvedTypes);
}
if (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.STRING) {
return true;
}
if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ERROR) {
return isErrorTypeAssignable((BErrorType) source, (BErrorType) target, unresolvedTypes);
} else if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ANY) {
return false;
}
if (sourceTag == TypeTags.NIL && (isNullable(target) || targetTag == TypeTags.JSON)) {
return true;
}
if (targetTag == TypeTags.ANY && !containsErrorType(source) && !isValueType(source)) {
return true;
}
if (targetTag == TypeTags.ANYDATA && !containsErrorType(source) && isAnydata(source)) {
return true;
}
if (targetTag == TypeTags.READONLY) {
if ((isInherentlyImmutableType(source) || Symbols.isFlagOn(source.flags, Flags.READONLY))) {
return true;
}
if (isAssignable(source, symTable.anyAndReadonlyOrError, unresolvedTypes)) {
return true;
}
}
if (sourceTag == TypeTags.READONLY && isAssignable(symTable.anyAndReadonlyOrError, target, unresolvedTypes)) {
return true;
}
if (targetTag == TypeTags.MAP && sourceTag == TypeTags.RECORD) {
BRecordType recordType = (BRecordType) source;
return isAssignableRecordType(recordType, target, unresolvedTypes);
}
if (targetTag == TypeTags.RECORD && sourceTag == TypeTags.MAP) {
return isAssignableMapType((BMapType) source, (BRecordType) target);
}
if (targetTag == TypeTags.TYPEDESC && sourceTag == TypeTags.TYPEDESC) {
return isAssignable(((BTypedescType) source).constraint, (((BTypedescType) target).constraint),
unresolvedTypes);
}
if (targetTag == TypeTags.TABLE && sourceTag == TypeTags.TABLE) {
return isAssignableTableType((BTableType) source, (BTableType) target, unresolvedTypes);
}
if (targetTag == TypeTags.STREAM && sourceTag == TypeTags.STREAM) {
return isAssignableStreamType((BStreamType) source, (BStreamType) target, unresolvedTypes);
}
if (isBuiltInTypeWidenPossible(source, target) == TypeTestResult.TRUE) {
return true;
}
if (sourceTag == TypeTags.FINITE) {
return isFiniteTypeAssignable((BFiniteType) source, target, unresolvedTypes);
}
if ((targetTag == TypeTags.UNION || sourceTag == TypeTags.UNION) &&
isAssignableToUnionType(source, target, unresolvedTypes)) {
return true;
}
if (targetTag == TypeTags.JSON) {
if (sourceTag == TypeTags.JSON) {
return true;
}
if (sourceTag == TypeTags.ARRAY) {
return isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes);
}
if (sourceTag == TypeTags.MAP) {
return isAssignable(((BMapType) source).constraint, target, unresolvedTypes);
}
if (sourceTag == TypeTags.RECORD) {
return isAssignableRecordType((BRecordType) source, target, unresolvedTypes);
}
}
if (targetTag == TypeTags.FUTURE && sourceTag == TypeTags.FUTURE) {
if (((BFutureType) target).constraint.tag == TypeTags.NONE) {
return true;
}
return isAssignable(((BFutureType) source).constraint, ((BFutureType) target).constraint, unresolvedTypes);
}
if (targetTag == TypeTags.MAP && sourceTag == TypeTags.MAP) {
if (((BMapType) target).constraint.tag == TypeTags.ANY &&
((BMapType) source).constraint.tag != TypeTags.UNION) {
return true;
}
return isAssignable(((BMapType) source).constraint, ((BMapType) target).constraint, unresolvedTypes);
}
if ((sourceTag == TypeTags.OBJECT || sourceTag == TypeTags.RECORD)
&& (targetTag == TypeTags.OBJECT || targetTag == TypeTags.RECORD)) {
return checkStructEquivalency(source, target, unresolvedTypes);
}
if (sourceTag == TypeTags.TUPLE && targetTag == TypeTags.ARRAY) {
return isTupleTypeAssignableToArrayType((BTupleType) source, (BArrayType) target, unresolvedTypes);
}
if (sourceTag == TypeTags.ARRAY && targetTag == TypeTags.TUPLE) {
return isArrayTypeAssignableToTupleType((BArrayType) source, (BTupleType) target, unresolvedTypes);
}
if (sourceTag == TypeTags.TUPLE || targetTag == TypeTags.TUPLE) {
return isTupleTypeAssignable(source, target, unresolvedTypes);
}
if (sourceTag == TypeTags.INVOKABLE && targetTag == TypeTags.INVOKABLE) {
return isFunctionTypeAssignable((BInvokableType) source, (BInvokableType) target, new HashSet<>());
}
return sourceTag == TypeTags.ARRAY && targetTag == TypeTags.ARRAY &&
isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes);
}
private boolean isMutable(BType type) {
if (Symbols.isFlagOn(type.flags, Flags.READONLY)) {
return false;
}
if (type.tag != TypeTags.UNION) {
return true;
}
BUnionType unionType = (BUnionType) type;
for (BType memberType : unionType.getMemberTypes()) {
if (!Symbols.isFlagOn(memberType.flags, Flags.READONLY)) {
return true;
}
}
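        // All members are readonly: cache that fact by flagging the union (and its symbol) as readonly.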
unionType.flags |= Flags.READONLY;
BTypeSymbol tsymbol = unionType.tsymbol;
if (tsymbol != null) {
tsymbol.flags |= Flags.READONLY;
}
return false;
}
private boolean isParameterizedTypeAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
BType resolvedSourceType = unifier.build(source);
if (target.tag != TypeTags.PARAMETERIZED_TYPE) {
return isAssignable(resolvedSourceType, target, unresolvedTypes);
}
if (((BParameterizedType) source).paramIndex != ((BParameterizedType) target).paramIndex) {
return false;
}
return isAssignable(resolvedSourceType, unifier.build(target), unresolvedTypes);
}
private boolean isAssignableRecordType(BRecordType recordType, BType type, Set<TypePair> unresolvedTypes) {
TypePair pair = new TypePair(recordType, type);
if (!unresolvedTypes.add(pair)) {
return true;
}
BType targetType;
switch (type.tag) {
case TypeTags.MAP:
targetType = ((BMapType) type).constraint;
break;
case TypeTags.JSON:
targetType = type;
break;
default:
throw new IllegalArgumentException("Incompatible target type: " + type.toString());
}
return recordFieldsAssignableToType(recordType, targetType, unresolvedTypes);
}
private boolean isAssignableStreamType(BStreamType sourceStreamType, BStreamType targetStreamType,
Set<TypePair> unresolvedTypes) {
return isAssignable(sourceStreamType.constraint, targetStreamType.constraint, unresolvedTypes)
&& isAssignable(sourceStreamType.error, targetStreamType.error, unresolvedTypes);
}
private boolean recordFieldsAssignableToType(BRecordType recordType, BType targetType,
Set<TypePair> unresolvedTypes) {
for (BField field : recordType.fields.values()) {
if (!isAssignable(field.type, targetType, unresolvedTypes)) {
return false;
}
}
if (!recordType.sealed) {
return isAssignable(recordType.restFieldType, targetType, unresolvedTypes);
}
return true;
}
private boolean isAssignableTableType(BTableType sourceTableType, BTableType targetTableType,
Set<TypePair> unresolvedTypes) {
if (!isAssignable(sourceTableType.constraint, targetTableType.constraint, unresolvedTypes)) {
return false;
}
if (targetTableType.keyTypeConstraint == null && targetTableType.fieldNameList == null) {
return true;
}
if (targetTableType.keyTypeConstraint != null) {
if (sourceTableType.keyTypeConstraint != null &&
(isAssignable(sourceTableType.keyTypeConstraint, targetTableType.keyTypeConstraint,
unresolvedTypes))) {
return true;
}
if (sourceTableType.fieldNameList == null) {
return false;
}
List<BType> fieldTypes = new ArrayList<>();
sourceTableType.fieldNameList.forEach(field -> fieldTypes
.add(getTableConstraintField(sourceTableType.constraint, field).type));
if (fieldTypes.size() == 1) {
return isAssignable(fieldTypes.get(0), targetTableType.keyTypeConstraint, unresolvedTypes);
}
BTupleType tupleType = new BTupleType(fieldTypes);
return isAssignable(tupleType, targetTableType.keyTypeConstraint, unresolvedTypes);
}
return targetTableType.fieldNameList.equals(sourceTableType.fieldNameList);
}
BField getTableConstraintField(BType constraintType, String fieldName) {
switch (constraintType.tag) {
case TypeTags.RECORD:
Map<String, BField> fieldList = ((BRecordType) constraintType).getFields();
return fieldList.get(fieldName);
case TypeTags.UNION:
BUnionType unionType = (BUnionType) constraintType;
Set<BType> memTypes = unionType.getMemberTypes();
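                // The key field must be present in every member of the union, with the same type in each;
                // otherwise it cannot serve as a table key constraint.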
List<BField> fields = memTypes.stream().map(type -> getTableConstraintField(type, fieldName))
.filter(Objects::nonNull).collect(Collectors.toList());
if (fields.size() != memTypes.size()) {
return null;
}
if (fields.stream().allMatch(field -> isAssignable(field.type, fields.get(0).type) &&
isAssignable(fields.get(0).type, field.type))) {
return fields.get(0);
}
break;
case TypeTags.INTERSECTION:
return getTableConstraintField(((BIntersectionType) constraintType).effectiveType, fieldName);
}
return null;
}
private boolean isAssignableMapType(BMapType sourceMapType, BRecordType targetRecType) {
if (targetRecType.sealed) {
return false;
}
for (BField field : targetRecType.fields.values()) {
if (!Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL)) {
return false;
}
if (hasIncompatibleReadOnlyFlags(field.symbol.flags, sourceMapType.flags)) {
return false;
}
if (!isAssignable(sourceMapType.constraint, field.type)) {
return false;
}
}
return isAssignable(sourceMapType.constraint, targetRecType.restFieldType);
}
private boolean hasIncompatibleReadOnlyFlags(long targetFlags, long sourceFlags) {
return Symbols.isFlagOn(targetFlags, Flags.READONLY) && !Symbols.isFlagOn(sourceFlags, Flags.READONLY);
}
private boolean isErrorTypeAssignable(BErrorType source, BErrorType target, Set<TypePair> unresolvedTypes) {
if (target == symTable.errorType) {
return true;
}
TypePair pair = new TypePair(source, target);
if (unresolvedTypes.contains(pair)) {
return true;
}
unresolvedTypes.add(pair);
return isAssignable(source.detailType, target.detailType, unresolvedTypes)
&& target.typeIdSet.isAssignableFrom(source.typeIdSet);
}
private boolean isXMLTypeAssignable(BType sourceType, BType targetType, Set<TypePair> unresolvedTypes) {
int sourceTag = sourceType.tag;
int targetTag = targetType.tag;
if (targetTag == TypeTags.XML) {
BXMLType target = (BXMLType) targetType;
if (target.constraint != null) {
if (TypeTags.isXMLNonSequenceType(sourceTag)) {
return isAssignable(sourceType, target.constraint, unresolvedTypes);
}
BXMLType source = (BXMLType) sourceType;
if (source.constraint.tag == TypeTags.NEVER) {
if (sourceTag == targetTag) {
return true;
}
return isAssignable(source, target.constraint, unresolvedTypes);
}
return isAssignable(source.constraint, target.constraint, unresolvedTypes);
}
return true;
}
if (sourceTag == TypeTags.XML) {
BXMLType source = (BXMLType) sourceType;
if (targetTag == TypeTags.XML_TEXT) {
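                // xml is only assignable to xml:Text when its constraint is never (i.e. the empty sequence).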
if (source.constraint != null) {
return source.constraint.tag == TypeTags.NEVER;
}
return false;
}
}
return sourceTag == targetTag;
}
private boolean isTupleTypeAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
if (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) {
return false;
}
BTupleType lhsTupleType = (BTupleType) target;
BTupleType rhsTupleType = (BTupleType) source;
if (lhsTupleType.restType == null && rhsTupleType.restType != null) {
return false;
}
if (lhsTupleType.restType == null && lhsTupleType.tupleTypes.size() != rhsTupleType.tupleTypes.size()) {
return false;
}
if (lhsTupleType.restType != null && rhsTupleType.restType != null) {
if (!isAssignable(rhsTupleType.restType, lhsTupleType.restType, unresolvedTypes)) {
return false;
}
}
if (lhsTupleType.tupleTypes.size() > rhsTupleType.tupleTypes.size()) {
return false;
}
for (int i = 0; i < rhsTupleType.tupleTypes.size(); i++) {
BType lhsType = (lhsTupleType.tupleTypes.size() > i)
? lhsTupleType.tupleTypes.get(i) : lhsTupleType.restType;
if (!isAssignable(rhsTupleType.tupleTypes.get(i), lhsType, unresolvedTypes)) {
return false;
}
}
return true;
}
private boolean checkAllTupleMembersBelongNoType(List<BType> tupleTypes) {
boolean isNoType = false;
for (BType memberType : tupleTypes) {
switch (memberType.tag) {
case TypeTags.NONE:
isNoType = true;
break;
case TypeTags.TUPLE:
isNoType = checkAllTupleMembersBelongNoType(((BTupleType) memberType).tupleTypes);
if (!isNoType) {
return false;
}
break;
default:
return false;
}
}
return isNoType;
}
private boolean isTupleTypeAssignableToArrayType(BTupleType source, BArrayType target,
Set<TypePair> unresolvedTypes) {
if (target.state != BArrayState.OPEN
&& (source.restType != null || source.tupleTypes.size() != target.size)) {
return false;
}
List<BType> sourceTypes = new ArrayList<>(source.tupleTypes);
if (source.restType != null) {
sourceTypes.add(source.restType);
}
return sourceTypes.stream()
.allMatch(tupleElemType -> isAssignable(tupleElemType, target.eType, unresolvedTypes));
}
private boolean isArrayTypeAssignableToTupleType(BArrayType source, BTupleType target,
Set<TypePair> unresolvedTypes) {
BType restType = target.restType;
List<BType> tupleTypes = target.tupleTypes;
if (source.state == BArrayState.OPEN) {
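        // An open (unsized) array can only match a tuple's rest type; it cannot satisfy fixed members.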
if (restType == null || !tupleTypes.isEmpty()) {
return false;
}
return isAssignable(source.eType, restType, unresolvedTypes);
}
int targetTupleMemberSize = tupleTypes.size();
int sourceArraySize = source.size;
if (targetTupleMemberSize > sourceArraySize) {
return false;
}
if (restType == null && targetTupleMemberSize < sourceArraySize) {
return false;
}
BType sourceElementType = source.eType;
for (BType memType : tupleTypes) {
if (!isAssignable(sourceElementType, memType, unresolvedTypes)) {
return false;
}
}
if (restType == null) {
return true;
}
return sourceArraySize == targetTupleMemberSize || isAssignable(sourceElementType, restType, unresolvedTypes);
}
private boolean isArrayTypesAssignable(BArrayType source, BType target, Set<TypePair> unresolvedTypes) {
BType sourceElementType = source.getElementType();
if (target.tag == TypeTags.ARRAY) {
BArrayType targetArrayType = (BArrayType) target;
BType targetElementType = targetArrayType.getElementType();
if (targetArrayType.state == BArrayState.OPEN) {
return isAssignable(sourceElementType, targetElementType, unresolvedTypes);
}
if (targetArrayType.size != source.size) {
return false;
}
return isAssignable(sourceElementType, targetElementType, unresolvedTypes);
} else if (target.tag == TypeTags.JSON) {
return isAssignable(sourceElementType, target, unresolvedTypes);
} else if (target.tag == TypeTags.ANYDATA) {
return isAssignable(sourceElementType, target, unresolvedTypes);
}
return false;
}
private boolean isFunctionTypeAssignable(BInvokableType source, BInvokableType target,
Set<TypePair> unresolvedTypes) {
if (hasIncompatibleIsolatedFlags(source, target) || hasIncompatibleTransactionalFlags(source, target)) {
return false;
}
if (Symbols.isFlagOn(target.flags, Flags.ANY_FUNCTION)) {
return true;
}
if (containsTypeParams(target)) {
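            // With type params in the target, a param bound to a type param is checked covariantly,
            // while the remaining params keep the usual contravariant check.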
if (source.paramTypes.size() != target.paramTypes.size()) {
return false;
}
for (int i = 0; i < source.paramTypes.size(); i++) {
BType sourceParam = source.paramTypes.get(i);
BType targetParam = target.paramTypes.get(i);
boolean isTypeParam = TypeParamAnalyzer.isTypeParam(targetParam);
if (isTypeParam) {
if (!isAssignable(sourceParam, targetParam)) {
return false;
}
} else {
if (!isAssignable(targetParam, sourceParam)) {
return false;
}
}
}
if (source.retType == null && target.retType == null) {
return true;
} else if (source.retType == null || target.retType == null) {
return false;
}
return isAssignable(source.retType, target.retType, unresolvedTypes);
}
return checkFunctionTypeEquality(source, target, unresolvedTypes, (s, t, ut) -> isAssignable(t, s, ut));
}
public boolean isInherentlyImmutableType(BType type) {
if (isValueType(type)) {
return true;
}
switch (type.tag) {
case TypeTags.XML_TEXT:
case TypeTags.FINITE:
case TypeTags.READONLY:
case TypeTags.NIL:
case TypeTags.ERROR:
case TypeTags.INVOKABLE:
case TypeTags.TYPEDESC:
case TypeTags.HANDLE:
return true;
case TypeTags.XML:
return ((BXMLType) type).constraint.tag == TypeTags.NEVER;
}
return false;
}
boolean isSelectivelyImmutableType(BType type) {
return isSelectivelyImmutableType(type, new HashSet<>(), false);
}
boolean isSelectivelyImmutableType(BType type, boolean forceCheck) {
return isSelectivelyImmutableType(type, new HashSet<>(), forceCheck);
}
public boolean isSelectivelyImmutableType(BType type, Set<BType> unresolvedTypes) {
return isSelectivelyImmutableType(type, unresolvedTypes, false);
}
private boolean isSelectivelyImmutableType(BType type, Set<BType> unresolvedTypes, boolean forceCheck) {
return isSelectivelyImmutableType(type, false, unresolvedTypes, forceCheck);
}
private boolean isSelectivelyImmutableType(BType type, boolean disallowReadOnlyObjects, Set<BType> unresolvedTypes,
boolean forceCheck) {
if (isInherentlyImmutableType(type) || !(type instanceof SelectivelyImmutableReferenceType)) {
return false;
}
if (!unresolvedTypes.add(type)) {
return true;
}
if (!forceCheck && ((SelectivelyImmutableReferenceType) type).getImmutableType() != null) {
return true;
}
switch (type.tag) {
case TypeTags.ANY:
case TypeTags.ANYDATA:
case TypeTags.JSON:
case TypeTags.XML:
case TypeTags.XML_COMMENT:
case TypeTags.XML_ELEMENT:
case TypeTags.XML_PI:
return true;
case TypeTags.ARRAY:
BType elementType = ((BArrayType) type).eType;
return isInherentlyImmutableType(elementType) ||
isSelectivelyImmutableType(elementType, unresolvedTypes, forceCheck);
case TypeTags.TUPLE:
BTupleType tupleType = (BTupleType) type;
for (BType tupMemType : tupleType.tupleTypes) {
if (!isInherentlyImmutableType(tupMemType) &&
!isSelectivelyImmutableType(tupMemType, unresolvedTypes, forceCheck)) {
return false;
}
}
BType tupRestType = tupleType.restType;
if (tupRestType == null) {
return true;
}
return isInherentlyImmutableType(tupRestType) ||
isSelectivelyImmutableType(tupRestType, unresolvedTypes, forceCheck);
case TypeTags.RECORD:
BRecordType recordType = (BRecordType) type;
for (BField field : recordType.fields.values()) {
BType fieldType = field.type;
if (!isInherentlyImmutableType(fieldType) &&
!isSelectivelyImmutableType(fieldType, unresolvedTypes, forceCheck)) {
return false;
}
}
BType recordRestType = recordType.restFieldType;
if (recordRestType == null || recordRestType == symTable.noType) {
return true;
}
return isInherentlyImmutableType(recordRestType) ||
isSelectivelyImmutableType(recordRestType, unresolvedTypes, forceCheck);
case TypeTags.MAP:
BType constraintType = ((BMapType) type).constraint;
return isInherentlyImmutableType(constraintType) ||
isSelectivelyImmutableType(constraintType, unresolvedTypes, forceCheck);
case TypeTags.OBJECT:
BObjectType objectType = (BObjectType) type;
for (BField field : objectType.fields.values()) {
BType fieldType = field.type;
if (!isInherentlyImmutableType(fieldType) &&
!isSelectivelyImmutableType(fieldType, unresolvedTypes, forceCheck)) {
return false;
}
}
return true;
case TypeTags.TABLE:
BType tableConstraintType = ((BTableType) type).constraint;
return isInherentlyImmutableType(tableConstraintType) ||
isSelectivelyImmutableType(tableConstraintType, unresolvedTypes, forceCheck);
case TypeTags.UNION:
boolean readonlyIntersectionExists = false;
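                // A union supports a read-only intersection if at least one member can be immutable.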
for (BType memberType : ((BUnionType) type).getMemberTypes()) {
if (isInherentlyImmutableType(memberType) ||
isSelectivelyImmutableType(memberType, unresolvedTypes, forceCheck)) {
readonlyIntersectionExists = true;
}
}
return readonlyIntersectionExists;
case TypeTags.INTERSECTION:
return isSelectivelyImmutableType(((BIntersectionType) type).effectiveType, unresolvedTypes,
forceCheck);
}
return false;
}
private boolean containsTypeParams(BInvokableType type) {
boolean hasParameterizedTypes = type.paramTypes.stream()
.anyMatch(t -> {
if (t.tag == TypeTags.FUNCTION_POINTER) {
return containsTypeParams((BInvokableType) t);
}
return TypeParamAnalyzer.isTypeParam(t);
});
        if (hasParameterizedTypes) {
            return true;
        }
if (type.retType.tag == TypeTags.FUNCTION_POINTER) {
return containsTypeParams((BInvokableType) type.retType);
}
return TypeParamAnalyzer.isTypeParam(type.retType);
}
private boolean isSameFunctionType(BInvokableType source, BInvokableType target, Set<TypePair> unresolvedTypes) {
return checkFunctionTypeEquality(source, target, unresolvedTypes, this::isSameType);
}
private boolean checkFunctionTypeEquality(BInvokableType source, BInvokableType target,
Set<TypePair> unresolvedTypes, TypeEqualityPredicate equality) {
if (hasIncompatibleIsolatedFlags(source, target) || hasIncompatibleTransactionalFlags(source, target)) {
return false;
}
if (Symbols.isFlagOn(target.flags, Flags.ANY_FUNCTION) && Symbols.isFlagOn(source.flags, Flags.ANY_FUNCTION)) {
return true;
}
if (Symbols.isFlagOn(target.flags, Flags.ANY_FUNCTION) || Symbols.isFlagOn(source.flags, Flags.ANY_FUNCTION)) {
return false;
}
if (source.paramTypes.size() != target.paramTypes.size()) {
return false;
}
for (int i = 0; i < source.paramTypes.size(); i++) {
if (!equality.test(source.paramTypes.get(i), target.paramTypes.get(i), unresolvedTypes)) {
return false;
}
}
if ((source.restType != null && target.restType == null) ||
target.restType != null && source.restType == null) {
return false;
} else if (source.restType != null && !equality.test(source.restType, target.restType, unresolvedTypes)) {
return false;
}
if (source.retType == null && target.retType == null) {
return true;
} else if (source.retType == null || target.retType == null) {
return false;
}
return isAssignable(source.retType, target.retType, unresolvedTypes);
}
private boolean hasIncompatibleIsolatedFlags(BInvokableType source, BInvokableType target) {
return Symbols.isFlagOn(target.flags, Flags.ISOLATED) && !Symbols.isFlagOn(source.flags, Flags.ISOLATED);
}
private boolean hasIncompatibleTransactionalFlags(BInvokableType source, BInvokableType target) {
return Symbols.isFlagOn(source.flags, Flags.TRANSACTIONAL) &&
!Symbols.isFlagOn(target.flags, Flags.TRANSACTIONAL);
}
public boolean isSameArrayType(BType source, BType target, Set<TypePair> unresolvedTypes) {
if (target.tag != TypeTags.ARRAY || source.tag != TypeTags.ARRAY) {
return false;
}
BArrayType lhsArrayType = (BArrayType) target;
BArrayType rhsArrayType = (BArrayType) source;
boolean hasSameTypeElements = isSameType(lhsArrayType.eType, rhsArrayType.eType, unresolvedTypes);
if (lhsArrayType.state == BArrayState.OPEN) {
return (rhsArrayType.state == BArrayState.OPEN) && hasSameTypeElements;
}
return checkSealedArraySizeEquality(rhsArrayType, lhsArrayType) && hasSameTypeElements;
}
public boolean isSameStreamType(BType source, BType target, Set<TypePair> unresolvedTypes) {
if (target.tag != TypeTags.STREAM || source.tag != TypeTags.STREAM) {
return false;
}
BStreamType lhsStreamType = (BStreamType) target;
BStreamType rhsStreamType = (BStreamType) source;
return isSameType(lhsStreamType.constraint, rhsStreamType.constraint, unresolvedTypes)
&& isSameType(lhsStreamType.error, rhsStreamType.error, unresolvedTypes);
}
public boolean checkSealedArraySizeEquality(BArrayType rhsArrayType, BArrayType lhsArrayType) {
return lhsArrayType.size == rhsArrayType.size;
}
public boolean checkStructEquivalency(BType rhsType, BType lhsType) {
return checkStructEquivalency(rhsType, lhsType, new HashSet<>());
}
private boolean checkStructEquivalency(BType rhsType, BType lhsType, Set<TypePair> unresolvedTypes) {
TypePair pair = new TypePair(rhsType, lhsType);
if (unresolvedTypes.contains(pair)) {
return true;
}
unresolvedTypes.add(pair);
if (rhsType.tag == TypeTags.OBJECT && lhsType.tag == TypeTags.OBJECT) {
return checkObjectEquivalency((BObjectType) rhsType, (BObjectType) lhsType, unresolvedTypes);
}
if (rhsType.tag == TypeTags.RECORD && lhsType.tag == TypeTags.RECORD) {
return checkRecordEquivalency((BRecordType) rhsType, (BRecordType) lhsType, unresolvedTypes);
}
return false;
}
public boolean checkObjectEquivalency(BObjectType rhsType, BObjectType lhsType, Set<TypePair> unresolvedTypes) {
if (Symbols.isFlagOn(lhsType.flags, Flags.ISOLATED) && !Symbols.isFlagOn(rhsType.flags, Flags.ISOLATED)) {
return false;
}
BObjectTypeSymbol lhsStructSymbol = (BObjectTypeSymbol) lhsType.tsymbol;
BObjectTypeSymbol rhsStructSymbol = (BObjectTypeSymbol) rhsType.tsymbol;
List<BAttachedFunction> lhsFuncs = lhsStructSymbol.attachedFuncs;
List<BAttachedFunction> rhsFuncs = ((BObjectTypeSymbol) rhsType.tsymbol).attachedFuncs;
int lhsAttachedFuncCount = getObjectFuncCount(lhsStructSymbol);
int rhsAttachedFuncCount = getObjectFuncCount(rhsStructSymbol);
boolean isLhsAService = Symbols.isService(lhsStructSymbol);
if (isLhsAService && !Symbols.isService(rhsStructSymbol)) {
return false;
}
if (lhsType.fields.size() > rhsType.fields.size() || lhsAttachedFuncCount > rhsAttachedFuncCount) {
return false;
}
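        // The LHS type cannot have private fields or methods; private members rule out structural equivalency.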
for (BField bField : lhsType.fields.values()) {
if (Symbols.isPrivate(bField.symbol)) {
return false;
}
}
for (BAttachedFunction func : lhsFuncs) {
if (Symbols.isPrivate(func.symbol)) {
return false;
}
}
for (BField lhsField : lhsType.fields.values()) {
BField rhsField = rhsType.fields.get(lhsField.name.value);
if (rhsField == null ||
!isInSameVisibilityRegion(lhsField.symbol, rhsField.symbol) ||
!isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) {
return false;
}
}
for (BAttachedFunction lhsFunc : lhsFuncs) {
if (lhsFunc == lhsStructSymbol.initializerFunc) {
continue;
}
if (isLhsAService && Symbols.isResource(lhsFunc.symbol)) {
continue;
}
BAttachedFunction rhsFunc = getMatchingInvokableType(rhsFuncs, lhsFunc, unresolvedTypes);
if (rhsFunc == null || !isInSameVisibilityRegion(lhsFunc.symbol, rhsFunc.symbol)) {
return false;
}
if (Symbols.isRemote(lhsFunc.symbol) != Symbols.isRemote(rhsFunc.symbol)) {
return false;
}
}
return lhsType.typeIdSet.isAssignableFrom(rhsType.typeIdSet);
}
private int getObjectFuncCount(BObjectTypeSymbol sym) {
if (sym.initializerFunc != null && sym.attachedFuncs.contains(sym.initializerFunc)) {
return sym.attachedFuncs.size() - 1;
}
return sym.attachedFuncs.size();
}
public boolean checkRecordEquivalency(BRecordType rhsType, BRecordType lhsType, Set<TypePair> unresolvedTypes) {
if (lhsType.sealed && !rhsType.sealed) {
return false;
}
if (!rhsType.sealed && !isAssignable(rhsType.restFieldType, lhsType.restFieldType, unresolvedTypes)) {
return false;
}
return checkFieldEquivalency(lhsType, rhsType, unresolvedTypes);
}
public void setInputClauseTypedBindingPatternType(BLangInputClause bLangInputClause) {
if (bLangInputClause.collection == null) {
return;
}
BType collectionType = bLangInputClause.collection.type;
BType varType;
switch (collectionType.tag) {
case TypeTags.STRING:
varType = symTable.stringType;
break;
case TypeTags.ARRAY:
BArrayType arrayType = (BArrayType) collectionType;
varType = arrayType.eType;
break;
case TypeTags.TUPLE:
BTupleType tupleType = (BTupleType) collectionType;
LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);
if (tupleType.restType != null) {
tupleTypes.add(tupleType.restType);
}
varType = tupleTypes.size() == 1 ?
tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);
break;
case TypeTags.MAP:
BMapType bMapType = (BMapType) collectionType;
varType = bMapType.constraint;
break;
case TypeTags.RECORD:
BRecordType recordType = (BRecordType) collectionType;
varType = inferRecordFieldType(recordType);
break;
case TypeTags.XML:
BXMLType xmlType = (BXMLType) collectionType;
varType = xmlType.constraint;
break;
case TypeTags.XML_TEXT:
varType = symTable.xmlTextType;
break;
case TypeTags.TABLE:
BTableType tableType = (BTableType) collectionType;
varType = tableType.constraint;
break;
case TypeTags.STREAM:
BStreamType streamType = (BStreamType) collectionType;
if (streamType.constraint.tag == TypeTags.NONE) {
varType = symTable.anydataType;
break;
}
varType = streamType.constraint;
break;
case TypeTags.OBJECT:
if (!isAssignable(bLangInputClause.collection.type, symTable.iterableType)) {
dlog.error(bLangInputClause.collection.pos, DiagnosticErrorCode.INVALID_ITERABLE_OBJECT_TYPE,
bLangInputClause.collection.type, symTable.iterableType);
bLangInputClause.varType = symTable.semanticError;
bLangInputClause.resultType = symTable.semanticError;
bLangInputClause.nillableResultType = symTable.semanticError;
return;
}
BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);
if (nextMethodReturnType != null) {
bLangInputClause.resultType = getRecordType(nextMethodReturnType);
bLangInputClause.nillableResultType = nextMethodReturnType;
bLangInputClause.varType = ((BRecordType) bLangInputClause.resultType).fields.get("value").type;
return;
}
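// Falls through to the error case when the iterator's next() return type cannot be resolved.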
case TypeTags.SEMANTIC_ERROR:
bLangInputClause.varType = symTable.semanticError;
bLangInputClause.resultType = symTable.semanticError;
bLangInputClause.nillableResultType = symTable.semanticError;
return;
default:
bLangInputClause.varType = symTable.semanticError;
bLangInputClause.resultType = symTable.semanticError;
bLangInputClause.nillableResultType = symTable.semanticError;
dlog.error(bLangInputClause.collection.pos, DiagnosticErrorCode.ITERABLE_NOT_SUPPORTED_COLLECTION,
collectionType);
return;
}
BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,
names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
BUnionType nextMethodReturnType =
(BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);
bLangInputClause.varType = varType;
bLangInputClause.resultType = getRecordType(nextMethodReturnType);
bLangInputClause.nillableResultType = nextMethodReturnType;
}
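// Derives the value type for iteration over an iterable object by locating its iterator() method and
// inspecting the return type of the iterator's next() method. Returns null if no conforming method is found.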
public BUnionType getVarTypeFromIterableObject(BObjectType collectionType) {
BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) collectionType.tsymbol;
for (BAttachedFunction func : objectTypeSymbol.attachedFuncs) {
if (func.funcName.value.equals(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC)) {
return getVarTypeFromIteratorFunc(func);
}
}
return null;
}
private BUnionType getVarTypeFromIteratorFunc(BAttachedFunction candidateIteratorFunc) {
if (!candidateIteratorFunc.type.paramTypes.isEmpty()) {
return null;
}
BType returnType = candidateIteratorFunc.type.retType;
return getVarTypeFromIteratorFuncReturnType(returnType);
}
public BUnionType getVarTypeFromIteratorFuncReturnType(BType returnType) {
BObjectTypeSymbol objectTypeSymbol;
if (returnType.tag != TypeTags.OBJECT) {
return null;
}
objectTypeSymbol = (BObjectTypeSymbol) returnType.tsymbol;
for (BAttachedFunction func : objectTypeSymbol.attachedFuncs) {
if (func.funcName.value.equals(BLangCompilerConstants.NEXT_FUNC)) {
return getVarTypeFromNextFunc(func);
}
}
return null;
}
private BUnionType getVarTypeFromNextFunc(BAttachedFunction nextFunc) {
BType returnType;
if (!nextFunc.type.paramTypes.isEmpty()) {
return null;
}
returnType = nextFunc.type.retType;
if (checkNextFuncReturnType(returnType)) {
return (BUnionType) returnType;
}
return null;
}
private boolean checkNextFuncReturnType(BType returnType) {
if (returnType.tag != TypeTags.UNION) {
return false;
}
List<BType> types = getAllTypes(returnType);
boolean containsCompletionType = types.removeIf(type -> type.tag == TypeTags.NIL);
containsCompletionType = types.removeIf(type -> type.tag == TypeTags.ERROR) || containsCompletionType;
if (!containsCompletionType) {
return false;
}
if (types.size() != 1) {
return false;
}
if (types.get(0).tag != TypeTags.RECORD) {
return false;
}
BRecordType recordType = (BRecordType) types.get(0);
return checkRecordTypeInNextFuncReturnType(recordType);
}
private boolean checkRecordTypeInNextFuncReturnType(BRecordType recordType) {
if (!recordType.sealed) {
return false;
}
if (recordType.fields.size() != 1) {
return false;
}
return recordType.fields.containsKey(BLangCompilerConstants.VALUE_FIELD);
}
private BRecordType getRecordType(BUnionType type) {
for (BType member : type.getMemberTypes()) {
if (member.tag == TypeTags.RECORD) {
return (BRecordType) member;
}
}
return null;
}
public BErrorType getErrorType(BUnionType type) {
for (BType member : type.getMemberTypes()) {
if (member.tag == TypeTags.ERROR) {
return (BErrorType) member;
} else if (member.tag == TypeTags.UNION) {
BErrorType e = getErrorType((BUnionType) member);
if (e != null) {
return e;
}
}
}
return null;
}
public BType getResultTypeOfNextInvocation(BObjectType iteratorType) {
BAttachedFunction nextFunc = getAttachedFuncFromObject(iteratorType, BLangCompilerConstants.NEXT_FUNC);
return Objects.requireNonNull(nextFunc).type.retType;
}
public BAttachedFunction getAttachedFuncFromObject(BObjectType objectType, String funcName) {
BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) objectType.tsymbol;
for (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) {
if (funcName.equals(bAttachedFunction.funcName.value)) {
return bAttachedFunction;
}
}
return null;
}
public BType inferRecordFieldType(BRecordType recordType) {
Map<String, BField> fields = recordType.fields;
BUnionType unionType = BUnionType.create(null);
if (!recordType.sealed) {
unionType.add(recordType.restFieldType);
} else if (fields.isEmpty()) {
unionType.add(symTable.neverType);
}
for (BField field : fields.values()) {
if (isAssignable(field.type, unionType)) {
continue;
}
if (isAssignable(unionType, field.type)) {
unionType = BUnionType.create(null);
}
unionType.add(field.type);
}
if (unionType.getMemberTypes().size() > 1) {
unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)),
Names.EMPTY, recordType.tsymbol.pkgID, null,
recordType.tsymbol.owner, symTable.builtinPos, VIRTUAL);
return unionType;
}
return unionType.getMemberTypes().iterator().next();
}
/**
* Enum to represent type test result.
*
* @since 1.2.0
*/
enum TypeTestResult {
NOT_FOUND,
TRUE,
FALSE
}
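// Determines whether a value of actualType can be widened to targetType for built-in (compiler-known)
// types. Returns TRUE or FALSE when a decision can be made here, and NOT_FOUND when these rules do not apply.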
TypeTestResult isBuiltInTypeWidenPossible(BType actualType, BType targetType) {
int targetTag = targetType.tag;
int actualTag = actualType.tag;
if (actualTag < TypeTags.JSON && targetTag < TypeTags.JSON) {
switch (actualTag) {
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
if (targetTag == TypeTags.BOOLEAN || targetTag == TypeTags.STRING) {
return TypeTestResult.FALSE;
}
break;
case TypeTags.BOOLEAN:
if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT
|| targetTag == TypeTags.DECIMAL || targetTag == TypeTags.STRING) {
return TypeTestResult.FALSE;
}
break;
case TypeTags.STRING:
if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT
|| targetTag == TypeTags.DECIMAL || targetTag == TypeTags.BOOLEAN) {
return TypeTestResult.FALSE;
}
break;
}
}
switch (actualTag) {
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.BOOLEAN:
case TypeTags.STRING:
case TypeTags.SIGNED32_INT:
case TypeTags.SIGNED16_INT:
case TypeTags.SIGNED8_INT:
case TypeTags.UNSIGNED32_INT:
case TypeTags.UNSIGNED16_INT:
case TypeTags.UNSIGNED8_INT:
case TypeTags.CHAR_STRING:
if (targetTag == TypeTags.JSON || targetTag == TypeTags.ANYDATA || targetTag == TypeTags.ANY ||
targetTag == TypeTags.READONLY) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.ANYDATA:
case TypeTags.TYPEDESC:
if (targetTag == TypeTags.ANY) {
return TypeTestResult.TRUE;
}
break;
default:
}
if (TypeTags.isIntegerTypeTag(targetTag) && actualTag == targetTag) {
return TypeTestResult.FALSE;
}
if ((TypeTags.isIntegerTypeTag(actualTag) || actualTag == TypeTags.BYTE)
&& (TypeTags.isIntegerTypeTag(targetTag) || targetTag == TypeTags.BYTE)) {
return checkBuiltInIntSubtypeWidenPossible(actualType, targetType);
}
if (actualTag == TypeTags.CHAR_STRING && TypeTags.STRING == targetTag) {
return TypeTestResult.TRUE;
}
return TypeTestResult.NOT_FOUND;
}
private TypeTestResult checkBuiltInIntSubtypeWidenPossible(BType actualType, BType targetType) {
int actualTag = actualType.tag;
switch (targetType.tag) {
case TypeTags.INT:
if (actualTag == TypeTags.BYTE || TypeTags.isIntegerTypeTag(actualTag)) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.SIGNED32_INT:
if (actualTag == TypeTags.SIGNED16_INT || actualTag == TypeTags.SIGNED8_INT ||
actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||
actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.SIGNED16_INT:
if (actualTag == TypeTags.SIGNED8_INT || actualTag == TypeTags.UNSIGNED8_INT ||
actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.UNSIGNED32_INT:
if (actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||
actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.UNSIGNED16_INT:
if (actualTag == TypeTags.UNSIGNED8_INT || actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.BYTE:
if (actualTag == TypeTags.UNSIGNED8_INT) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.UNSIGNED8_INT:
if (actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
}
return TypeTestResult.NOT_FOUND;
}
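// Checks whether a value of actualType can be implicitly cast (widened) to targetType.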
public boolean isImplicityCastable(BType actualType, BType targetType) {
/* The word "Builtin" refers to compiler-known types. */
BType newTargetType = targetType;
if ((targetType.tag == TypeTags.UNION || targetType.tag == TypeTags.FINITE) && isValueType(actualType)) {
newTargetType = symTable.anyType;
} else if (targetType.tag == TypeTags.INTERSECTION) {
newTargetType = ((BIntersectionType) targetType).effectiveType;
}
TypeTestResult result = isBuiltInTypeWidenPossible(actualType, newTargetType);
if (result != TypeTestResult.NOT_FOUND) {
return result == TypeTestResult.TRUE;
}
if (isValueType(targetType) &&
(actualType.tag == TypeTags.FINITE ||
(actualType.tag == TypeTags.UNION && ((BUnionType) actualType).getMemberTypes().stream()
.anyMatch(type -> type.tag == TypeTags.FINITE && isAssignable(type, targetType))))) {
return targetType.tag == TypeTags.INT || targetType.tag == TypeTags.BYTE || targetType.tag == TypeTags.FLOAT
|| targetType.tag == TypeTags.STRING || targetType.tag == TypeTags.BOOLEAN;
} else if (targetType.tag == TypeTags.ERROR
&& (actualType.tag == TypeTags.UNION
&& isAllErrorMembers((BUnionType) actualType))) {
return true;
}
return false;
}
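/**
 * Checks whether an explicit type cast from sourceType to targetType can be valid for the given
 * expression. Assignability in either direction, numeric conversions, any-to-readonly casts, and
 * casts where at least one member of a union or finite source/target intersects with the other type
 * are considered castable.
 */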
public boolean isTypeCastable(BLangExpression expr, BType sourceType, BType targetType, SymbolEnv env) {
if (sourceType.tag == TypeTags.SEMANTIC_ERROR || targetType.tag == TypeTags.SEMANTIC_ERROR ||
sourceType == targetType) {
return true;
}
IntersectionContext intersectionContext = IntersectionContext.compilerInternalIntersectionTestContext();
BType errorIntersection = getTypeIntersection(intersectionContext, sourceType, symTable.errorType, env);
if (errorIntersection != symTable.semanticError &&
getTypeIntersection(intersectionContext, symTable.errorType, targetType, env)
== symTable.semanticError) {
return false;
}
if (isAssignable(sourceType, targetType) || isAssignable(targetType, sourceType)) {
return true;
}
if (isNumericConversionPossible(expr, sourceType, targetType)) {
return true;
}
if (sourceType.tag == TypeTags.ANY && targetType.tag == TypeTags.READONLY) {
return true;
}
boolean validTypeCast = false;
if (sourceType instanceof BUnionType) {
if (getTypeForUnionTypeMembersAssignableToType((BUnionType) sourceType, targetType, env,
intersectionContext)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (targetType instanceof BUnionType) {
if (getTypeForUnionTypeMembersAssignableToType((BUnionType) targetType, sourceType, env,
intersectionContext)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (sourceType.tag == TypeTags.FINITE) {
if (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) sourceType, targetType)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (targetType.tag == TypeTags.FINITE) {
if (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) targetType, sourceType)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (validTypeCast) {
if (isValueType(sourceType)) {
setImplicitCastExpr(expr, sourceType, symTable.anyType);
}
return true;
}
return false;
}
boolean isNumericConversionPossible(BLangExpression expr, BType sourceType,
BType targetType) {
final boolean isSourceNumericType = isBasicNumericType(sourceType);
final boolean isTargetNumericType = isBasicNumericType(targetType);
if (isSourceNumericType && isTargetNumericType) {
return true;
}
if (targetType.tag == TypeTags.UNION) {
HashSet<Integer> typeTags = new HashSet<>();
for (BType bType : ((BUnionType) targetType).getMemberTypes()) {
if (isBasicNumericType(bType)) {
typeTags.add(bType.tag);
if (typeTags.size() > 1) {
return false;
}
}
}
}
if (!isTargetNumericType && targetType.tag != TypeTags.UNION) {
return false;
}
if (isSourceNumericType) {
setImplicitCastExpr(expr, sourceType, symTable.anyType);
return true;
}
switch (sourceType.tag) {
case TypeTags.ANY:
case TypeTags.ANYDATA:
case TypeTags.JSON:
return true;
case TypeTags.UNION:
for (BType memType : ((BUnionType) sourceType).getMemberTypes()) {
if (isBasicNumericType(memType) ||
(memType.tag == TypeTags.FINITE &&
finiteTypeContainsNumericTypeValues((BFiniteType) memType))) {
return true;
}
}
break;
case TypeTags.FINITE:
if (finiteTypeContainsNumericTypeValues((BFiniteType) sourceType)) {
return true;
}
break;
}
return false;
}
private boolean isAllErrorMembers(BUnionType actualType) {
return actualType.getMemberTypes().stream().allMatch(t -> isAssignable(t, symTable.errorType));
}
public void setImplicitCastExpr(BLangExpression expr, BType actualType, BType expType) {
if (!isImplicityCastable(actualType, expType)) {
return;
}
BLangTypeConversionExpr implicitConversionExpr =
(BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
implicitConversionExpr.pos = expr.pos;
implicitConversionExpr.expr = expr.impConversionExpr == null ? expr : expr.impConversionExpr;
implicitConversionExpr.type = expType;
implicitConversionExpr.targetType = expType;
implicitConversionExpr.internal = true;
expr.impConversionExpr = implicitConversionExpr;
}
public BType getElementType(BType type) {
if (type.tag != TypeTags.ARRAY) {
return type;
}
return getElementType(((BArrayType) type).getElementType());
}
public boolean checkListenerCompatibilityAtServiceDecl(BType type) {
if (type.tag == TypeTags.UNION) {
int listenerCompatibleTypeCount = 0;
for (BType memberType : ((BUnionType) type).getMemberTypes()) {
if (memberType.tag != TypeTags.ERROR) {
if (!checkListenerCompatibility(memberType)) {
return false;
}
listenerCompatibleTypeCount++;
}
}
return listenerCompatibleTypeCount > 0;
}
return checkListenerCompatibility(type);
}
public boolean checkListenerCompatibility(BType type) {
if (type.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) type;
for (BType memberType : unionType.getMemberTypes()) {
if (!checkListenerCompatibility(memberType)) {
return false;
}
}
return true;
}
if (type.tag != TypeTags.OBJECT) {
return false;
}
BObjectType rhsType = (BObjectType) type;
List<BAttachedFunction> rhsFuncs = ((BStructureTypeSymbol) rhsType.tsymbol).attachedFuncs;
ListenerValidationModel listenerValidationModel = new ListenerValidationModel(this, symTable);
return listenerValidationModel.checkMethods(rhsFuncs);
}
public boolean isValidErrorDetailType(BType detailType) {
switch (detailType.tag) {
case TypeTags.MAP:
case TypeTags.RECORD:
return isAssignable(detailType, symTable.detailType);
}
return false;
}
private boolean isSealedRecord(BType recordType) {
return recordType.getKind() == TypeKind.RECORD && ((BRecordType) recordType).sealed;
}
private boolean isNullable(BType fieldType) {
return fieldType.isNullable();
}
private class BSameTypeVisitor implements BTypeVisitor<BType, Boolean> {
Set<TypePair> unresolvedTypes;
BSameTypeVisitor(Set<TypePair> unresolvedTypes) {
this.unresolvedTypes = unresolvedTypes;
}
@Override
public Boolean visit(BType t, BType s) {
if (t == s) {
return true;
}
switch (t.tag) {
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.STRING:
case TypeTags.BOOLEAN:
return t.tag == s.tag
&& (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));
case TypeTags.ANY:
case TypeTags.ANYDATA:
return t.tag == s.tag && hasSameReadonlyFlag(s, t)
&& (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));
default:
break;
}
return false;
}
@Override
public Boolean visit(BBuiltInRefType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BAnyType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BAnydataType t, BType s) {
if (t == s) {
return true;
}
return t.tag == s.tag;
}
@Override
public Boolean visit(BMapType t, BType s) {
if (s.tag != TypeTags.MAP || !hasSameReadonlyFlag(s, t)) {
return false;
}
BMapType sType = ((BMapType) s);
return isSameType(sType.constraint, t.constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BFutureType t, BType s) {
return s.tag == TypeTags.FUTURE &&
isSameType(t.constraint, ((BFutureType) s).constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BXMLType t, BType s) {
return visit((BBuiltInRefType) t, s);
}
@Override
public Boolean visit(BJSONType t, BType s) {
return s.tag == TypeTags.JSON && hasSameReadonlyFlag(s, t);
}
@Override
public Boolean visit(BArrayType t, BType s) {
return s.tag == TypeTags.ARRAY && hasSameReadonlyFlag(s, t) && isSameArrayType(s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BObjectType t, BType s) {
if (t == s) {
return true;
}
if (s.tag != TypeTags.OBJECT) {
return false;
}
return t.tsymbol.pkgID.equals(s.tsymbol.pkgID) && t.tsymbol.name.equals(s.tsymbol.name);
}
@Override
public Boolean visit(BRecordType t, BType s) {
if (t == s) {
return true;
}
if (s.tag != TypeTags.RECORD || !hasSameReadonlyFlag(s, t)) {
return false;
}
BRecordType source = (BRecordType) s;
if (source.fields.size() != t.fields.size()) {
return false;
}
for (BField sourceField : source.fields.values()) {
if (t.fields.containsKey(sourceField.name.value)) {
BField targetField = t.fields.get(sourceField.name.value);
if (isSameType(sourceField.type, targetField.type, this.unresolvedTypes) &&
hasSameOptionalFlag(sourceField.symbol, targetField.symbol) &&
(!Symbols.isFlagOn(targetField.symbol.flags, Flags.READONLY) ||
Symbols.isFlagOn(sourceField.symbol.flags, Flags.READONLY))) {
continue;
}
}
return false;
}
return isSameType(source.restFieldType, t.restFieldType, this.unresolvedTypes);
}
private boolean hasSameOptionalFlag(BVarSymbol s, BVarSymbol t) {
return ((s.flags & Flags.OPTIONAL) ^ (t.flags & Flags.OPTIONAL)) != Flags.OPTIONAL;
}
private boolean hasSameReadonlyFlag(BType source, BType target) {
return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY);
}
@Override
public Boolean visit(BTupleType t, BType s) {
if (((!t.tupleTypes.isEmpty() && checkAllTupleMembersBelongNoType(t.tupleTypes)) ||
(t.restType != null && t.restType.tag == TypeTags.NONE)) &&
!(s.tag == TypeTags.ARRAY && ((BArrayType) s).state == BArrayState.OPEN)) {
return true;
}
if (s.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(s, t)) {
return false;
}
BTupleType source = (BTupleType) s;
if (source.tupleTypes.size() != t.tupleTypes.size()) {
return false;
}
BType sourceRestType = source.restType;
BType targetRestType = t.restType;
if ((sourceRestType == null || targetRestType == null) && sourceRestType != targetRestType) {
return false;
}
for (int i = 0; i < source.tupleTypes.size(); i++) {
if (t.getTupleTypes().get(i) == symTable.noType) {
continue;
}
if (!isSameType(source.getTupleTypes().get(i), t.tupleTypes.get(i), this.unresolvedTypes)) {
return false;
}
}
if (sourceRestType == null || targetRestType == symTable.noType) {
return true;
}
return isSameType(sourceRestType, targetRestType, this.unresolvedTypes);
}
@Override
public Boolean visit(BStreamType t, BType s) {
return s.tag == TypeTags.STREAM && isSameStreamType(s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BTableType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BInvokableType t, BType s) {
return s.tag == TypeTags.INVOKABLE && isSameFunctionType((BInvokableType) s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BUnionType tUnionType, BType s) {
if (s.tag != TypeTags.UNION || !hasSameReadonlyFlag(s, tUnionType)) {
if (inOrderedType) {
inOrderedType = false;
return isSimpleBasicType(s.tag) && checkUnionHasSameFiniteType(tUnionType.getMemberTypes(), s);
}
return false;
}
BUnionType sUnionType = (BUnionType) s;
if (sUnionType.getMemberTypes().size()
!= tUnionType.getMemberTypes().size()) {
return false;
}
Set<BType> sourceTypes = new LinkedHashSet<>(sUnionType.getMemberTypes().size());
Set<BType> targetTypes = new LinkedHashSet<>(tUnionType.getMemberTypes().size());
sourceTypes.add(sUnionType);
sourceTypes.addAll(sUnionType.getMemberTypes());
targetTypes.add(tUnionType);
targetTypes.addAll(tUnionType.getMemberTypes());
return sourceTypes.stream().allMatch(
sT -> targetTypes.stream().anyMatch(it -> isSameType(it, sT, this.unresolvedTypes)));
}
@Override
public Boolean visit(BIntersectionType tIntersectionType, BType s) {
if (s.tag != TypeTags.INTERSECTION || !hasSameReadonlyFlag(s, tIntersectionType)) {
return false;
}
BIntersectionType sIntersectionType = (BIntersectionType) s;
if (sIntersectionType.getConstituentTypes().size() != tIntersectionType.getConstituentTypes().size()) {
return false;
}
Set<BType> sourceTypes = new LinkedHashSet<>(sIntersectionType.getConstituentTypes());
Set<BType> targetTypes = new LinkedHashSet<>(tIntersectionType.getConstituentTypes());
for (BType sourceType : sourceTypes) {
boolean foundSameType = false;
for (BType targetType : targetTypes) {
if (isSameType(sourceType, targetType, this.unresolvedTypes)) {
foundSameType = true;
break;
}
}
if (!foundSameType) {
return false;
}
}
return true;
}
@Override
public Boolean visit(BErrorType t, BType s) {
if (s.tag != TypeTags.ERROR) {
return false;
}
BErrorType source = (BErrorType) s;
if (!source.typeIdSet.equals(t.typeIdSet)) {
return false;
}
if (source.detailType == t.detailType) {
return true;
}
return isSameType(source.detailType, t.detailType, this.unresolvedTypes);
}
@Override
public Boolean visit(BTypedescType t, BType s) {
if (s.tag != TypeTags.TYPEDESC) {
return false;
}
BTypedescType sType = ((BTypedescType) s);
return isSameType(sType.constraint, t.constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BFiniteType t, BType s) {
if (inOrderedType) {
inOrderedType = false;
return checkValueSpaceHasSameType(t, s);
}
return s == t;
}
@Override
public Boolean visit(BParameterizedType t, BType s) {
if (s.tag != TypeTags.PARAMETERIZED_TYPE) {
return false;
}
BParameterizedType sType = (BParameterizedType) s;
return isSameType(sType.paramValueType, t.paramValueType) && sType.paramSymbol.equals(t.paramSymbol);
}
};
private boolean checkUnionHasSameFiniteType(LinkedHashSet<BType> memberTypes, BType baseType) {
for (BType type : memberTypes) {
if (type.tag != TypeTags.FINITE) {
return false;
}
boolean isValueSpaceSameType = false;
for (BLangExpression expr : ((BFiniteType) type).getValueSpace()) {
isValueSpaceSameType = isSameType(expr.type, baseType);
if (!isValueSpaceSameType) {
break;
}
}
return isValueSpaceSameType;
}
return false;
}
private boolean checkValueSpaceHasSameType(BFiniteType finiteType, BType baseType) {
if (baseType.tag == TypeTags.FINITE) {
return finiteType == baseType;
}
boolean isValueSpaceSameType = false;
for (BLangExpression expr : finiteType.getValueSpace()) {
isValueSpaceSameType = isSameType(expr.type, baseType);
if (!isValueSpaceSameType) {
break;
}
}
return isValueSpaceSameType;
}
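// Checks that every required LHS field has a compatible RHS field (respecting optional and readonly
// flags) and that any remaining RHS fields are assignable to the LHS rest field type.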
private boolean checkFieldEquivalency(BRecordType lhsType, BRecordType rhsType, Set<TypePair> unresolvedTypes) {
Map<String, BField> rhsFields = new LinkedHashMap<>(rhsType.fields);
for (BField lhsField : lhsType.fields.values()) {
BField rhsField = rhsFields.get(lhsField.name.value);
if (rhsField == null) {
if (!Symbols.isOptional(lhsField.symbol)) {
return false;
}
continue;
}
if (hasIncompatibleReadOnlyFlags(lhsField.symbol.flags, rhsField.symbol.flags)) {
return false;
}
if (!Symbols.isOptional(lhsField.symbol) && Symbols.isOptional(rhsField.symbol)) {
return false;
}
if (!isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) {
return false;
}
rhsFields.remove(lhsField.name.value);
}
return rhsFields.entrySet().stream().allMatch(
fieldEntry -> isAssignable(fieldEntry.getValue().type, lhsType.restFieldType, unresolvedTypes));
}
private BAttachedFunction getMatchingInvokableType(List<BAttachedFunction> rhsFuncList, BAttachedFunction lhsFunc,
Set<TypePair> unresolvedTypes) {
return rhsFuncList.stream()
.filter(rhsFunc -> lhsFunc.funcName.equals(rhsFunc.funcName))
.filter(rhsFunc -> isFunctionTypeAssignable(rhsFunc.type, lhsFunc.type, unresolvedTypes))
.findFirst()
.orElse(null);
}
private boolean isInSameVisibilityRegion(BSymbol lhsSym, BSymbol rhsSym) {
if (Symbols.isPrivate(lhsSym)) {
return Symbols.isPrivate(rhsSym) && lhsSym.pkgID.equals(rhsSym.pkgID)
&& lhsSym.owner.name.equals(rhsSym.owner.name);
} else if (Symbols.isPublic(lhsSym)) {
return Symbols.isPublic(rhsSym);
}
return !Symbols.isPrivate(rhsSym) && !Symbols.isPublic(rhsSym) && lhsSym.pkgID.equals(rhsSym.pkgID);
}
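/**
 * Checks assignability when the source and/or target may be union types. Cyclic unions are handled by
 * tentatively recording the type pair in unresolvedTypes. Value-typed and finite source members are
 * matched against the target members first; the remaining members are then checked, treating matching
 * self-referencing structures as assignable.
 */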
private boolean isAssignableToUnionType(BType source, BType target, Set<TypePair> unresolvedTypes) {
TypePair pair = new TypePair(source, target);
if (unresolvedTypes.contains(pair)) {
return true;
}
if (source.tag == TypeTags.UNION && ((BUnionType) source).isCyclic) {
unresolvedTypes.add(pair);
}
Set<BType> sourceTypes = new LinkedHashSet<>();
Set<BType> targetTypes = new LinkedHashSet<>();
if (source.tag == TypeTags.UNION || source.tag == TypeTags.JSON || source.tag == TypeTags.ANYDATA) {
sourceTypes.addAll(getEffectiveMemberTypes((BUnionType) source));
} else {
sourceTypes.add(source);
}
boolean targetIsAUnion = false;
if (target.tag == TypeTags.UNION) {
targetIsAUnion = true;
targetTypes.addAll(getEffectiveMemberTypes((BUnionType) target));
} else {
targetTypes.add(target);
}
var sourceIterator = sourceTypes.iterator();
while (sourceIterator.hasNext()) {
BType sMember = sourceIterator.next();
if (sMember.tag == TypeTags.NEVER) {
sourceIterator.remove();
continue;
}
if (sMember.tag == TypeTags.FINITE && isAssignable(sMember, target, unresolvedTypes)) {
sourceIterator.remove();
continue;
}
if (sMember.tag == TypeTags.XML &&
isAssignableToUnionType(expandedXMLBuiltinSubtypes, target, unresolvedTypes)) {
sourceIterator.remove();
continue;
}
if (!isValueType(sMember)) {
if (!targetIsAUnion) {
continue;
}
BUnionType targetUnion = (BUnionType) target;
if (sMember instanceof BUnionType) {
BUnionType sUnion = (BUnionType) sMember;
if (sUnion.isCyclic && targetUnion.isCyclic) {
unresolvedTypes.add(new TypePair(sUnion, targetUnion));
if (isAssignable(sUnion, targetUnion, unresolvedTypes)) {
sourceIterator.remove();
continue;
}
}
}
if (sMember.tag == TypeTags.READONLY) {
unresolvedTypes.add(new TypePair(sMember, targetUnion));
if (isAssignable(sMember, targetUnion, unresolvedTypes)) {
sourceIterator.remove();
continue;
}
}
continue;
}
boolean sourceTypeIsNotAssignableToAnyTargetType = true;
var targetIterator = targetTypes.iterator();
while (targetIterator.hasNext()) {
BType t = targetIterator.next();
if (isAssignable(sMember, t, unresolvedTypes)) {
sourceIterator.remove();
sourceTypeIsNotAssignableToAnyTargetType = false;
break;
}
}
if (sourceTypeIsNotAssignableToAnyTargetType) {
return false;
}
}
sourceIterator = sourceTypes.iterator();
while (sourceIterator.hasNext()) {
BType sourceMember = sourceIterator.next();
boolean sourceTypeIsNotAssignableToAnyTargetType = true;
var targetIterator = targetTypes.iterator();
boolean selfReferencedSource = (sourceMember != source) &&
isSelfReferencedStructuredType(source, sourceMember);
while (targetIterator.hasNext()) {
BType targetMember = targetIterator.next();
boolean selfReferencedTarget = isSelfReferencedStructuredType(target, targetMember);
if (selfReferencedTarget && selfReferencedSource && (sourceMember.tag == targetMember.tag)) {
sourceTypeIsNotAssignableToAnyTargetType = false;
break;
}
if (isAssignable(sourceMember, targetMember, unresolvedTypes)) {
sourceTypeIsNotAssignableToAnyTargetType = false;
break;
}
}
if (sourceTypeIsNotAssignableToAnyTargetType) {
return false;
}
}
unresolvedTypes.add(pair);
return true;
}
public boolean isSelfReferencedStructuredType(BType source, BType s) {
if (source == s) {
return true;
}
if (s.tag == TypeTags.ARRAY) {
return isSelfReferencedStructuredType(source, ((BArrayType) s).eType);
}
if (s.tag == TypeTags.MAP) {
return isSelfReferencedStructuredType(source, ((BMapType) s).constraint);
}
if (s.tag == TypeTags.TABLE) {
return isSelfReferencedStructuredType(source, ((BTableType) s).constraint);
}
return false;
}
public BType updateSelfReferencedWithNewType(BType source, BType s, BType target) {
if (s.tag == TypeTags.ARRAY) {
BArrayType arrayType = (BArrayType) s;
if (arrayType.eType == source) {
return new BArrayType(target, arrayType.tsymbol, arrayType.size,
arrayType.state, arrayType.flags);
}
}
if (s.tag == TypeTags.MAP) {
BMapType mapType = (BMapType) s;
if (mapType.constraint == source) {
return new BMapType(mapType.tag, target, mapType.tsymbol, mapType.flags);
}
}
if (s.tag == TypeTags.TABLE) {
BTableType tableType = (BTableType) s;
if (tableType.constraint == source) {
return new BTableType(tableType.tag, target, tableType.tsymbol,
tableType.flags);
} else if (tableType.constraint instanceof BMapType) {
return updateSelfReferencedWithNewType(source, (BMapType) tableType.constraint, target);
}
}
return s;
}
public static void fixSelfReferencingSameUnion(BType originalMemberType, BUnionType origUnionType,
BType immutableMemberType, BUnionType newImmutableUnion,
LinkedHashSet<BType> readOnlyMemTypes) {
boolean sameMember = originalMemberType == immutableMemberType;
if (originalMemberType.tag == TypeTags.ARRAY) {
var arrayType = (BArrayType) originalMemberType;
if (origUnionType == arrayType.eType) {
if (sameMember) {
BArrayType newArrayType = new BArrayType(newImmutableUnion, arrayType.tsymbol, arrayType.size,
arrayType.state, arrayType.flags);
readOnlyMemTypes.add(newArrayType);
} else {
((BArrayType) immutableMemberType).eType = newImmutableUnion;
readOnlyMemTypes.add(immutableMemberType);
}
}
} else if (originalMemberType.tag == TypeTags.MAP) {
var mapType = (BMapType) originalMemberType;
if (origUnionType == mapType.constraint) {
if (sameMember) {
BMapType newMapType = new BMapType(mapType.tag, newImmutableUnion, mapType.tsymbol, mapType.flags);
readOnlyMemTypes.add(newMapType);
} else {
((BMapType) immutableMemberType).constraint = newImmutableUnion;
readOnlyMemTypes.add(immutableMemberType);
}
}
} else if (originalMemberType.tag == TypeTags.TABLE) {
var tableType = (BTableType) originalMemberType;
if (origUnionType == tableType.constraint) {
if (sameMember) {
BTableType newTableType = new BTableType(tableType.tag, newImmutableUnion, tableType.tsymbol,
tableType.flags);
readOnlyMemTypes.add(newTableType);
} else {
((BTableType) immutableMemberType).constraint = newImmutableUnion;
readOnlyMemTypes.add(immutableMemberType);
}
return;
}
var immutableConstraint = ((BTableType) immutableMemberType).constraint;
if (tableType.constraint.tag == TypeTags.MAP) {
sameMember = tableType.constraint == immutableConstraint;
var mapType = (BMapType) tableType.constraint;
if (origUnionType == mapType.constraint) {
if (sameMember) {
BMapType newMapType = new BMapType(mapType.tag, newImmutableUnion, mapType.tsymbol,
mapType.flags);
((BTableType) immutableMemberType).constraint = newMapType;
} else {
((BTableType) immutableMemberType).constraint = newImmutableUnion;
}
readOnlyMemTypes.add(immutableMemberType);
}
}
} else {
readOnlyMemTypes.add(immutableMemberType);
}
}
private Set<BType> getEffectiveMemberTypes(BUnionType unionType) {
Set<BType> memTypes = new LinkedHashSet<>();
for (BType memberType : unionType.getMemberTypes()) {
switch (memberType.tag) {
case TypeTags.INTERSECTION:
BType effectiveType = ((BIntersectionType) memberType).effectiveType;
if (effectiveType.tag == TypeTags.UNION) {
memTypes.addAll(getEffectiveMemberTypes((BUnionType) effectiveType));
continue;
}
memTypes.add(effectiveType);
break;
case TypeTags.UNION:
memTypes.addAll(getEffectiveMemberTypes((BUnionType) memberType));
break;
default:
memTypes.add(memberType);
break;
}
}
return memTypes;
}
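// A finite type is assignable to the target type if every value in its value space is assignable to
// (or is a member of the value space of) the target type.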
private boolean isFiniteTypeAssignable(BFiniteType finiteType, BType targetType, Set<TypePair> unresolvedTypes) {
if (targetType.tag == TypeTags.FINITE) {
return finiteType.getValueSpace().stream()
.allMatch(expression -> isAssignableToFiniteType(targetType, (BLangLiteral) expression));
}
if (targetType.tag == TypeTags.UNION) {
List<BType> unionMemberTypes = getAllTypes(targetType);
return finiteType.getValueSpace().stream()
.allMatch(valueExpr -> unionMemberTypes.stream()
.anyMatch(targetMemType -> targetMemType.tag == TypeTags.FINITE ?
isAssignableToFiniteType(targetMemType, (BLangLiteral) valueExpr) :
isAssignable(valueExpr.type, targetType, unresolvedTypes)));
}
return finiteType.getValueSpace().stream()
.allMatch(expression -> isAssignable(expression.type, targetType, unresolvedTypes));
}
boolean isAssignableToFiniteType(BType type, BLangLiteral literalExpr) {
if (type.tag != TypeTags.FINITE) {
return false;
}
BFiniteType expType = (BFiniteType) type;
return expType.getValueSpace().stream().anyMatch(memberLiteral -> {
if (((BLangLiteral) memberLiteral).value == null) {
return literalExpr.value == null;
}
return checkLiteralAssignabilityBasedOnType((BLangLiteral) memberLiteral, literalExpr);
});
}
/**
* Method to check literal assignability based on the types of the literals. For numeric literals the
* assignability depends on the equivalency of the literals. The candidate literal can be either a simple
* literal or a constant. In the case of a constant, it is assignable to the base literal if and only if both
* literals have the same type and equivalent values.
*
* @param baseLiteral Literal based on which we check the assignability.
* @param candidateLiteral Literal to be tested whether it is assignable to the base literal or not.
* @return true if assignable; false otherwise.
*/
boolean checkLiteralAssignabilityBasedOnType(BLangLiteral baseLiteral, BLangLiteral candidateLiteral) {
if (baseLiteral.getKind() != candidateLiteral.getKind()) {
return false;
}
Object baseValue = baseLiteral.value;
Object candidateValue = candidateLiteral.value;
int candidateTypeTag = candidateLiteral.type.tag;
switch (baseLiteral.type.tag) {
case TypeTags.BYTE:
if (candidateTypeTag == TypeTags.BYTE || (candidateTypeTag == TypeTags.INT &&
!candidateLiteral.isConstant && isByteLiteralValue((Long) candidateValue))) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.INT:
if (candidateTypeTag == TypeTags.INT) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.SIGNED32_INT:
if (candidateTypeTag == TypeTags.INT && isSigned32LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.SIGNED16_INT:
if (candidateTypeTag == TypeTags.INT && isSigned16LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.SIGNED8_INT:
if (candidateTypeTag == TypeTags.INT && isSigned8LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.UNSIGNED32_INT:
if (candidateTypeTag == TypeTags.INT && isUnsigned32LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.UNSIGNED16_INT:
if (candidateTypeTag == TypeTags.INT && isUnsigned16LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.UNSIGNED8_INT:
if (candidateTypeTag == TypeTags.INT && isUnsigned8LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.FLOAT:
String baseValueStr = String.valueOf(baseValue);
String originalValue = baseLiteral.originalValue != null ? baseLiteral.originalValue : baseValueStr;
if (NumericLiteralSupport.isDecimalDiscriminated(originalValue)) {
return false;
}
double baseDoubleVal = Double.parseDouble(baseValueStr);
double candidateDoubleVal;
if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {
candidateDoubleVal = ((Long) candidateValue).doubleValue();
return baseDoubleVal == candidateDoubleVal;
} else if (candidateTypeTag == TypeTags.FLOAT) {
candidateDoubleVal = Double.parseDouble(String.valueOf(candidateValue));
return baseDoubleVal == candidateDoubleVal;
}
break;
case TypeTags.DECIMAL:
BigDecimal baseDecimalVal = NumericLiteralSupport.parseBigDecimal(baseValue);
BigDecimal candidateDecimalVal;
if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {
candidateDecimalVal = new BigDecimal((long) candidateValue, MathContext.DECIMAL128);
return baseDecimalVal.compareTo(candidateDecimalVal) == 0;
} else if (candidateTypeTag == TypeTags.FLOAT && !candidateLiteral.isConstant ||
candidateTypeTag == TypeTags.DECIMAL) {
if (NumericLiteralSupport.isFloatDiscriminated(String.valueOf(candidateValue))) {
return false;
}
candidateDecimalVal = NumericLiteralSupport.parseBigDecimal(candidateValue);
return baseDecimalVal.compareTo(candidateDecimalVal) == 0;
}
break;
default:
return baseValue.equals(candidateValue);
}
return false;
}
// Compare the long value directly to avoid truncation for values outside the int range.
boolean isByteLiteralValue(Long longObject) {
return (longObject >= BBYTE_MIN_VALUE && longObject <= BBYTE_MAX_VALUE);
}
boolean isSigned32LiteralValue(Long longObject) {
return (longObject >= SIGNED32_MIN_VALUE && longObject <= SIGNED32_MAX_VALUE);
}
boolean isSigned16LiteralValue(Long longObject) {
return (longObject >= SIGNED16_MIN_VALUE && longObject <= SIGNED16_MAX_VALUE);
}
boolean isSigned8LiteralValue(Long longObject) {
return (longObject >= SIGNED8_MIN_VALUE && longObject <= SIGNED8_MAX_VALUE);
}
boolean isUnsigned32LiteralValue(Long longObject) {
return (longObject >= 0 && longObject <= UNSIGNED32_MAX_VALUE);
}
boolean isUnsigned16LiteralValue(Long longObject) {
return (longObject >= 0 && longObject <= UNSIGNED16_MAX_VALUE);
}
boolean isUnsigned8LiteralValue(Long longObject) {
return (longObject >= 0 && longObject <= UNSIGNED8_MAX_VALUE);
}
boolean isCharLiteralValue(String literal) {
return (literal.codePoints().count() == 1);
}
/**
* Method to retrieve a type representing all the values in the value space of a finite type that are assignable to
* the target type.
*
* @param finiteType the finite type
* @param targetType the target type
* @return a new finite type if at least one value in the value space of the specified finiteType is
* assignable to targetType (the same if all are assignable), else semanticError
*/
BType getTypeForFiniteTypeValuesAssignableToType(BFiniteType finiteType, BType targetType) {
if (isAssignable(finiteType, targetType)) {
return finiteType;
}
Set<BLangExpression> matchingValues = finiteType.getValueSpace().stream()
.filter(
expr -> isAssignable(expr.type, targetType) ||
isAssignableToFiniteType(targetType, (BLangLiteral) expr) ||
(targetType.tag == TypeTags.UNION &&
((BUnionType) targetType).getMemberTypes().stream()
.filter(memType -> memType.tag == TypeTags.FINITE)
.anyMatch(filteredType -> isAssignableToFiniteType(filteredType,
(BLangLiteral) expr))))
.collect(Collectors.toSet());
if (matchingValues.isEmpty()) {
return symTable.semanticError;
}
BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, finiteType.tsymbol.flags,
names.fromString("$anonType$" + UNDERSCORE + finiteTypeCount++),
finiteType.tsymbol.pkgID, null,
finiteType.tsymbol.owner, finiteType.tsymbol.pos,
VIRTUAL);
BFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, matchingValues);
finiteTypeSymbol.type = intersectingFiniteType;
return intersectingFiniteType;
}
/**
* Method to retrieve a type representing all the member types of a union type that are assignable to
* the target type.
*
* @param intersectionContext the intersection context to use when computing member intersections
* @param unionType the union type
* @param targetType the target type
* @return a single type or a new union type if at least one member type of the union type is
* assignable to targetType, else semanticError
*/
BType getTypeForUnionTypeMembersAssignableToType(BUnionType unionType, BType targetType, SymbolEnv env,
IntersectionContext intersectionContext) {
List<BType> intersection = new LinkedList<>();
unionType.getMemberTypes().forEach(memType -> {
BType memberIntersectionType = getTypeIntersection(intersectionContext, memType, targetType, env);
if (memberIntersectionType != symTable.semanticError) {
intersection.add(memberIntersectionType);
}
});
if (intersection.isEmpty()) {
return symTable.semanticError;
}
if (intersection.size() == 1) {
return intersection.get(0);
} else {
return BUnionType.create(null, new LinkedHashSet<>(intersection));
}
}
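// Checks whether the value spaces of the two (pure) types can intersect, i.e. whether an equality
// check between values of these types could ever succeed.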
boolean validEqualityIntersectionExists(BType lhsType, BType rhsType) {
if (!isPureType(lhsType) || !isPureType(rhsType)) {
return false;
}
if (isAssignable(lhsType, rhsType) || isAssignable(rhsType, lhsType)) {
return true;
}
Set<BType> lhsTypes = expandAndGetMemberTypesRecursive(lhsType);
Set<BType> rhsTypes = expandAndGetMemberTypesRecursive(rhsType);
return equalityIntersectionExists(lhsTypes, rhsTypes);
}
private boolean equalityIntersectionExists(Set<BType> lhsTypes, Set<BType> rhsTypes) {
if ((lhsTypes.contains(symTable.anydataType) &&
rhsTypes.stream().anyMatch(type -> type.tag != TypeTags.ERROR)) ||
(rhsTypes.contains(symTable.anydataType) &&
lhsTypes.stream().anyMatch(type -> type.tag != TypeTags.ERROR))) {
return true;
}
boolean matchFound = lhsTypes
.stream()
.anyMatch(s -> rhsTypes
.stream()
.anyMatch(t -> isSameType(s, t)));
if (!matchFound) {
matchFound = equalityIntersectionExistsForComplexTypes(lhsTypes, rhsTypes);
}
return matchFound;
}
/**
* Retrieves the member types of the specified type, expanding arrays/maps whose element/constraint type
* is a union into the individual array/map types.
*
* e.g., (string|int)[] results in three entries: string[], int[], and (string|int)[]
*
* @param bType the type for which member types need to be identified
* @return a set containing all the retrieved member types
*/
public Set<BType> expandAndGetMemberTypesRecursive(BType bType) {
Set<BType> memberTypes = new LinkedHashSet<>();
switch (bType.tag) {
case TypeTags.BYTE:
case TypeTags.INT:
memberTypes.add(symTable.intType);
memberTypes.add(symTable.byteType);
break;
case TypeTags.FINITE:
BFiniteType expType = (BFiniteType) bType;
expType.getValueSpace().forEach(value -> {
memberTypes.add(value.type);
});
break;
case TypeTags.UNION:
BUnionType unionType = (BUnionType) bType;
unionType.getMemberTypes().forEach(member -> {
memberTypes.addAll(expandAndGetMemberTypesRecursive(member));
});
break;
case TypeTags.ARRAY:
BType arrayElementType = ((BArrayType) bType).getElementType();
if (((BArrayType) bType).getSize() != -1) {
memberTypes.add(new BArrayType(arrayElementType));
}
if (arrayElementType.tag == TypeTags.UNION) {
Set<BType> elementUnionTypes = expandAndGetMemberTypesRecursive(arrayElementType);
elementUnionTypes.forEach(elementUnionType -> {
memberTypes.add(new BArrayType(elementUnionType));
});
}
memberTypes.add(bType);
break;
case TypeTags.MAP:
BType mapConstraintType = ((BMapType) bType).getConstraint();
if (mapConstraintType.tag == TypeTags.UNION) {
Set<BType> constraintUnionTypes = expandAndGetMemberTypesRecursive(mapConstraintType);
constraintUnionTypes.forEach(constraintUnionType -> {
memberTypes.add(new BMapType(TypeTags.MAP, constraintUnionType, symTable.mapType.tsymbol));
});
}
memberTypes.add(bType);
break;
default:
memberTypes.add(bType);
}
return memberTypes;
}
private boolean tupleIntersectionExists(BTupleType lhsType, BTupleType rhsType) {
if (lhsType.getTupleTypes().size() != rhsType.getTupleTypes().size()) {
return false;
}
List<BType> lhsMemberTypes = lhsType.getTupleTypes();
List<BType> rhsMemberTypes = rhsType.getTupleTypes();
for (int i = 0; i < lhsType.getTupleTypes().size(); i++) {
if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberTypes.get(i)),
expandAndGetMemberTypesRecursive(rhsMemberTypes.get(i)))) {
return false;
}
}
return true;
}
private boolean equalityIntersectionExistsForComplexTypes(Set<BType> lhsTypes, Set<BType> rhsTypes) {
for (BType lhsMemberType : lhsTypes) {
switch (lhsMemberType.tag) {
case TypeTags.INT:
case TypeTags.STRING:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.BOOLEAN:
case TypeTags.NIL:
if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {
return true;
}
break;
case TypeTags.JSON:
if (jsonEqualityIntersectionExists(rhsTypes)) {
return true;
}
break;
case TypeTags.TUPLE:
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&
tupleIntersectionExists((BTupleType) lhsMemberType, (BTupleType) rhsMemberType))) {
return true;
}
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&
arrayTupleEqualityIntersectionExists((BArrayType) rhsMemberType,
(BTupleType) lhsMemberType))) {
return true;
}
break;
case TypeTags.ARRAY:
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&
equalityIntersectionExists(
expandAndGetMemberTypesRecursive(((BArrayType) lhsMemberType).eType),
expandAndGetMemberTypesRecursive(((BArrayType) rhsMemberType).eType)))) {
return true;
}
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&
arrayTupleEqualityIntersectionExists((BArrayType) lhsMemberType,
(BTupleType) rhsMemberType))) {
return true;
}
break;
case TypeTags.MAP:
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&
equalityIntersectionExists(
expandAndGetMemberTypesRecursive(((BMapType) lhsMemberType).constraint),
expandAndGetMemberTypesRecursive(((BMapType) rhsMemberType).constraint)))) {
return true;
}
if (!isAssignable(((BMapType) lhsMemberType).constraint, symTable.errorType) &&
rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {
return true;
}
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&
mapRecordEqualityIntersectionExists((BMapType) lhsMemberType,
(BRecordType) rhsMemberType))) {
return true;
}
break;
case TypeTags.OBJECT:
case TypeTags.RECORD:
if (rhsTypes.stream().anyMatch(
rhsMemberType -> checkStructEquivalency(rhsMemberType, lhsMemberType) ||
checkStructEquivalency(lhsMemberType, rhsMemberType))) {
return true;
}
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&
recordEqualityIntersectionExists((BRecordType) lhsMemberType,
(BRecordType) rhsMemberType))) {
return true;
}
if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON) &&
jsonEqualityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberType))) {
return true;
}
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&
mapRecordEqualityIntersectionExists((BMapType) rhsMemberType,
(BRecordType) lhsMemberType))) {
return true;
}
break;
}
}
return false;
}
private boolean arrayTupleEqualityIntersectionExists(BArrayType arrayType, BTupleType tupleType) {
Set<BType> elementTypes = expandAndGetMemberTypesRecursive(arrayType.eType);
return tupleType.tupleTypes.stream()
.allMatch(tupleMemType -> equalityIntersectionExists(elementTypes,
expandAndGetMemberTypesRecursive(tupleMemType)));
}
private boolean recordEqualityIntersectionExists(BRecordType lhsType, BRecordType rhsType) {
Map<String, BField> lhsFields = lhsType.fields;
Map<String, BField> rhsFields = rhsType.fields;
List<Name> matchedFieldNames = new ArrayList<>();
for (BField lhsField : lhsFields.values()) {
if (rhsFields.containsKey(lhsField.name.value)) {
if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),
expandAndGetMemberTypesRecursive(
rhsFields.get(lhsField.name.value).type))) {
return false;
}
matchedFieldNames.add(lhsField.getName());
} else {
if (Symbols.isFlagOn(lhsField.symbol.flags, Flags.OPTIONAL)) {
break;
}
if (rhsType.sealed) {
return false;
}
if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),
expandAndGetMemberTypesRecursive(rhsType.restFieldType))) {
return false;
}
}
}
for (BField rhsField : rhsFields.values()) {
if (matchedFieldNames.contains(rhsField.getName())) {
continue;
}
if (!Symbols.isFlagOn(rhsField.symbol.flags, Flags.OPTIONAL)) {
if (lhsType.sealed) {
return false;
}
if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(rhsField.type),
expandAndGetMemberTypesRecursive(lhsType.restFieldType))) {
return false;
}
}
}
return true;
}
private boolean mapRecordEqualityIntersectionExists(BMapType mapType, BRecordType recordType) {
Set<BType> mapConstrTypes = expandAndGetMemberTypesRecursive(mapType.getConstraint());
for (BField field : recordType.fields.values()) {
if (!Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) &&
!equalityIntersectionExists(mapConstrTypes, expandAndGetMemberTypesRecursive(field.type))) {
return false;
}
}
return true;
}
private boolean jsonEqualityIntersectionExists(Set<BType> typeSet) {
for (BType type : typeSet) {
switch (type.tag) {
case TypeTags.MAP:
if (!isAssignable(((BMapType) type).constraint, symTable.errorType)) {
return true;
}
break;
case TypeTags.RECORD:
BRecordType recordType = (BRecordType) type;
if (recordType.fields.values().stream()
.allMatch(field -> Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) ||
!isAssignable(field.type, symTable.errorType))) {
return true;
}
break;
default:
if (isAssignable(type, symTable.jsonType)) {
return true;
}
}
}
return false;
}
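// Computes the type remaining for a match expression after removing the values covered by typeToRemove
// from originalType. Union, finite, and tuple types are narrowed; any other type is returned unchanged.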
public BType getRemainingMatchExprType(BType originalType, BType typeToRemove) {
switch (originalType.tag) {
case TypeTags.UNION:
return getRemainingType((BUnionType) originalType, getAllTypes(typeToRemove));
case TypeTags.FINITE:
return getRemainingType((BFiniteType) originalType, getAllTypes(typeToRemove));
case TypeTags.TUPLE:
return getRemainingType((BTupleType) originalType, typeToRemove);
default:
return originalType;
}
}
private BType getRemainingType(BTupleType originalType, BType typeToRemove) {
switch (typeToRemove.tag) {
case TypeTags.TUPLE:
return getRemainingType(originalType, (BTupleType) typeToRemove);
case TypeTags.ARRAY:
return getRemainingType(originalType, (BArrayType) typeToRemove);
default:
return originalType;
}
}
private BType getRemainingType(BTupleType originalType, BTupleType typeToRemove) {
if (originalType.restType != null) {
return originalType;
}
List<BType> originalTupleTypes = new ArrayList<>(originalType.tupleTypes);
List<BType> typesToRemove = new ArrayList<>(typeToRemove.tupleTypes);
if (originalTupleTypes.size() < typesToRemove.size()) {
return originalType;
}
List<BType> tupleTypes = new ArrayList<>();
for (int i = 0; i < originalTupleTypes.size(); i++) {
tupleTypes.add(getRemainingMatchExprType(originalTupleTypes.get(i), typesToRemove.get(i)));
}
if (typeToRemove.restType == null) {
return new BTupleType(tupleTypes);
}
if (originalTupleTypes.size() == typesToRemove.size()) {
return originalType;
}
for (int i = typesToRemove.size(); i < originalTupleTypes.size(); i++) {
tupleTypes.add(getRemainingMatchExprType(originalTupleTypes.get(i), typeToRemove.restType));
}
return new BTupleType(tupleTypes);
}
private BType getRemainingType(BTupleType originalType, BArrayType typeToRemove) {
BType eType = typeToRemove.eType;
List<BType> tupleTypes = new ArrayList<>();
for (BType tupleType : originalType.tupleTypes) {
tupleTypes.add(getRemainingMatchExprType(tupleType, eType));
}
BTupleType remainingType = new BTupleType(tupleTypes);
if (originalType.restType != null) {
remainingType.restType = getRemainingMatchExprType(originalType.restType, eType);
}
return remainingType;
}
public BType getRemainingType(BType originalType, BType typeToRemove) {
switch (originalType.tag) {
case TypeTags.UNION:
return getRemainingType((BUnionType) originalType, getAllTypes(typeToRemove));
case TypeTags.FINITE:
return getRemainingType((BFiniteType) originalType, getAllTypes(typeToRemove));
case TypeTags.READONLY:
return getRemainingType((BReadonlyType) originalType, typeToRemove);
default:
return originalType;
}
}
private BType getRemainingType(BReadonlyType originalType, BType removeType) {
if (removeType.tag == TypeTags.ERROR) {
return symTable.anyAndReadonly;
}
return originalType;
}
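/**
 * Computes the intersection of lhsType with each component type of rhsType. Returns the single
 * resulting type, a union of the per-component intersections, or semanticError when the intersection
 * is empty (unless lhsType is the null set, which is returned as is).
 */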
public BType getTypeIntersection(IntersectionContext intersectionContext, BType lhsType, BType rhsType,
SymbolEnv env) {
List<BType> rhsTypeComponents = getAllTypes(rhsType);
LinkedHashSet<BType> intersection = new LinkedHashSet<>();
for (BType rhsComponent : rhsTypeComponents) {
BType it = getIntersection(intersectionContext, lhsType, env, rhsComponent);
if (it != null) {
intersection.add(it);
}
}
if (intersection.isEmpty()) {
if (lhsType.tag == TypeTags.NULL_SET) {
return lhsType;
}
return symTable.semanticError;
}
if (intersection.size() == 1) {
return intersection.iterator().next();
} else {
return BUnionType.create(null, intersection);
}
}
private BType getIntersection(IntersectionContext intersectionContext, BType lhsType, SymbolEnv env, BType type) {
if (intersectionContext.preferNonGenerativeIntersection) {
if (isAssignable(type, lhsType)) {
return type;
} else if (isAssignable(lhsType, type)) {
return lhsType;
}
}
if (type.tag == TypeTags.ERROR && lhsType.tag == TypeTags.ERROR) {
BType intersectionType = getIntersectionForErrorTypes(intersectionContext, lhsType, type, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.RECORD && lhsType.tag == TypeTags.RECORD) {
BType intersectionType = createRecordIntersection(intersectionContext, (BRecordType) lhsType,
(BRecordType) type, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.MAP && lhsType.tag == TypeTags.RECORD) {
BType intersectionType = createRecordAndMapIntersection(intersectionContext.switchLeft(),
lhsType, type, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.RECORD && lhsType.tag == TypeTags.MAP) {
BType intersectionType = createRecordAndMapIntersection(intersectionContext.switchRight(),
type, lhsType, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (!intersectionContext.preferNonGenerativeIntersection && isAssignable(type, lhsType)) {
return type;
} else if (!intersectionContext.preferNonGenerativeIntersection && isAssignable(lhsType, type)) {
return lhsType;
} else if (lhsType.tag == TypeTags.FINITE) {
BType intersectionType = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) lhsType, type);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.FINITE) {
BType intersectionType = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) type, lhsType);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (lhsType.tag == TypeTags.UNION) {
BType intersectionType = getTypeForUnionTypeMembersAssignableToType((BUnionType) lhsType, type, env,
intersectionContext);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.UNION) {
BType intersectionType = getTypeForUnionTypeMembersAssignableToType((BUnionType) type, lhsType, env,
intersectionContext);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.NULL_SET) {
return type;
} else if (type.tag == TypeTags.MAP && lhsType.tag == TypeTags.MAP) {
BType intersectionType = createRecordAndMapIntersection(intersectionContext,
type, lhsType, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.ARRAY && lhsType.tag == TypeTags.TUPLE) {
BType intersectionType = createArrayAndTupleIntersection(intersectionContext,
(BArrayType) type, (BTupleType) lhsType, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.TUPLE && lhsType.tag == TypeTags.ARRAY) {
BType intersectionType = createArrayAndTupleIntersection(intersectionContext,
(BArrayType) lhsType, (BTupleType) type, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
}
return null;
}
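    /**
     * Intersects a tuple type with an array type by intersecting each tuple member type with the array's
     * element type; a failed member intersection yields {@code symTable.semanticError}, while a failed rest
     * type intersection simply drops the rest type from the result.
     */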
private BType createArrayAndTupleIntersection(IntersectionContext intersectionContext,
BArrayType arrayType, BTupleType tupleType, SymbolEnv env) {
List<BType> tupleMemberTypes = new ArrayList<>();
for (BType memberType : tupleType.tupleTypes) {
BType intersectionType = getTypeIntersection(intersectionContext, memberType, arrayType.eType, env);
if (intersectionType == symTable.semanticError) {
return symTable.semanticError;
}
tupleMemberTypes.add(intersectionType);
}
if (tupleType.restType == null) {
return new BTupleType(null, tupleMemberTypes);
}
BType restIntersectionType = getTypeIntersection(intersectionContext,
tupleType.restType, arrayType.eType, env);
if (restIntersectionType == symTable.semanticError) {
return new BTupleType(null, tupleMemberTypes);
}
return new BTupleType(null, tupleMemberTypes, restIntersectionType, 0);
}
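    /**
     * Intersects two error types via their detail types. For user-written intersections, sealed record
     * details are disallowed and a type definition is created for the resulting error type.
     */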
private BType getIntersectionForErrorTypes(IntersectionContext intersectionContext,
BType lhsType, BType rhsType, SymbolEnv env) {
BType detailTypeOne = ((BErrorType) lhsType).detailType;
BType detailTypeTwo = ((BErrorType) rhsType).detailType;
if (!intersectionContext.compilerInternalIntersectionTest
&& (isSealedRecord(detailTypeOne) || isSealedRecord(detailTypeTwo))) {
return symTable.semanticError;
}
BType detailIntersectionType = getTypeIntersection(intersectionContext, detailTypeOne, detailTypeTwo, env);
if (detailIntersectionType == symTable.semanticError) {
return symTable.semanticError;
}
BErrorType intersectionErrorType = createErrorType(lhsType, rhsType, detailIntersectionType, env);
if (!intersectionContext.compilerInternalIntersectionTest) {
BTypeSymbol errorTSymbol = intersectionErrorType.tsymbol;
BLangErrorType bLangErrorType = TypeDefBuilderHelper.createBLangErrorType(symTable.builtinPos,
intersectionErrorType, env, anonymousModelHelper);
BLangTypeDefinition errorTypeDefinition = TypeDefBuilderHelper.addTypeDefinition(
intersectionErrorType, errorTSymbol, bLangErrorType, env);
errorTypeDefinition.pos = symTable.builtinPos;
}
return intersectionErrorType;
}
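    /**
     * Intersects two record types by populating a new anonymous record with the fields of both (each narrowed
     * against the other record's rest field constraint) and intersecting the rest field types; a type
     * definition is created unless this is a compiler-internal intersection test.
     */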
private BType createRecordIntersection(IntersectionContext diagnosticContext,
BRecordType recordTypeOne, BRecordType recordTypeTwo, SymbolEnv env) {
BRecordType newType = createAnonymousRecord(env);
if (!populateRecordFields(diagnosticContext.switchLeft(), newType, recordTypeOne, env,
getConstraint(recordTypeTwo)) ||
!populateRecordFields(diagnosticContext.switchRight(), newType, recordTypeTwo, env,
getConstraint(recordTypeOne))) {
return symTable.semanticError;
}
newType.restFieldType = getTypeIntersection(diagnosticContext, recordTypeOne.restFieldType,
recordTypeTwo.restFieldType, env);
if (newType.restFieldType == symTable.semanticError) {
return symTable.semanticError;
}
if (!diagnosticContext.compilerInternalIntersectionTest) {
BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(
newType, env.enclPkg.packageID, symTable, symTable.builtinPos);
BLangTypeDefinition recordTypeDef = TypeDefBuilderHelper.addTypeDefinition(
newType, newType.tsymbol, recordTypeNode, env);
env.enclPkg.symbol.scope.define(newType.tsymbol.name, newType.tsymbol);
recordTypeDef.pos = symTable.builtinPos;
}
return newType;
}
private BType getConstraint(BRecordType recordType) {
if (recordType.sealed) {
return symTable.neverType;
}
return recordType.restFieldType;
}
private BRecordType createAnonymousRecord(SymbolEnv env) {
EnumSet<Flag> flags = EnumSet.of(Flag.PUBLIC, Flag.ANONYMOUS);
BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.asMask(flags), Names.EMPTY,
env.enclPkg.packageID, null,
env.scope.owner, null, VIRTUAL);
recordSymbol.name = names.fromString(
anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID));
BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false,
symTable.builtinPos, VIRTUAL);
initFuncSymbol.retType = symTable.nilType;
recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol,
bInvokableType, symTable.builtinPos);
recordSymbol.scope = new Scope(recordSymbol);
BRecordType recordType = new BRecordType(recordSymbol);
recordType.tsymbol = recordSymbol;
recordSymbol.type = recordType;
return recordType;
}
private BType createRecordAndMapIntersection(IntersectionContext intersectionContext,
BType type, BType mapType, SymbolEnv env) {
BRecordType intersectionRecord = createAnonymousRecord(env);
if (!populateRecordFields(intersectionContext, intersectionRecord, type, env,
((BMapType) mapType).constraint)) {
return symTable.semanticError;
}
if (intersectionContext.compilerInternalIntersectionTest && ((BRecordType) type).sealed) {
return intersectionRecord;
}
intersectionRecord.restFieldType = getRestFieldIntersectionType(intersectionContext,
type, (BMapType) mapType, env);
if (intersectionRecord.restFieldType == symTable.semanticError) {
return symTable.semanticError;
}
if (!intersectionContext.compilerInternalIntersectionTest) {
BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(
intersectionRecord, env.enclPkg.packageID, symTable, symTable.builtinPos);
BLangTypeDefinition recordTypeDef = TypeDefBuilderHelper.addTypeDefinition(
intersectionRecord, intersectionRecord.tsymbol, recordTypeNode, env);
env.enclPkg.symbol.scope.define(intersectionRecord.tsymbol.name, intersectionRecord.tsymbol);
recordTypeDef.pos = symTable.builtinPos;
}
return intersectionRecord;
}
private BType getRestFieldIntersectionType(IntersectionContext intersectionContext,
BType type, BMapType mapType, SymbolEnv env) {
if (type.tag == TypeTags.RECORD) {
return getTypeIntersection(intersectionContext,
((BRecordType) type).restFieldType, mapType.constraint, env);
} else {
return getTypeIntersection(intersectionContext,
((BMapType) type).constraint, mapType.constraint, env);
}
}
private BErrorType createErrorType(BType lhsType, BType rhsType, BType detailType, SymbolEnv env) {
BErrorType lhsErrorType = (BErrorType) lhsType;
BErrorType rhsErrorType = (BErrorType) rhsType;
BErrorType errorType = createErrorType(detailType, lhsType.flags, env);
errorType.tsymbol.flags |= rhsType.flags;
errorType.typeIdSet = BTypeIdSet.getIntersection(lhsErrorType.typeIdSet, rhsErrorType.typeIdSet);
return errorType;
}
public BErrorType createErrorType(BType detailType, long flags, SymbolEnv env) {
String name = anonymousModelHelper.getNextAnonymousIntersectionErrorTypeName(env.enclPkg.packageID);
BErrorTypeSymbol errorTypeSymbol = Symbols.createErrorSymbol(flags, names.fromString(name),
env.enclPkg.symbol.pkgID, null,
env.scope.owner, symTable.builtinPos, VIRTUAL);
errorTypeSymbol.scope = new Scope(errorTypeSymbol);
BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
errorType.flags |= errorTypeSymbol.flags;
errorTypeSymbol.type = errorType;
errorType.typeIdSet = BTypeIdSet.emptySet();
return errorType;
}
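    /**
     * Copies the fields of {@code originalType} (when it is a record) into {@code newType}, narrowing each
     * field type against the given constraint. Fails if a field has a default value (disallowed for
     * user-written intersections) or if a required field's type has no intersection with the constraint.
     */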
private boolean populateRecordFields(IntersectionContext diagnosticContext, BRecordType newType,
BType originalType, SymbolEnv env, BType constraint) {
BTypeSymbol intersectionRecordSymbol = newType.tsymbol;
if (originalType.getKind() != TypeKind.RECORD) {
return true;
}
BRecordType originalRecordType = (BRecordType) originalType;
LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
for (BField origField : originalRecordType.fields.values()) {
org.wso2.ballerinalang.compiler.util.Name origFieldName = origField.name;
String nameString = origFieldName.value;
if (!validateRecordFieldDefaultValueForIntersection(diagnosticContext, origField, originalRecordType)) {
return false;
}
BType recordFieldType = validateRecordField(diagnosticContext, newType, origField, constraint, env);
if (recordFieldType == symTable.semanticError) {
return false;
}
BVarSymbol recordFieldSymbol = new BVarSymbol(origField.symbol.flags, origFieldName,
env.enclPkg.packageID, recordFieldType,
intersectionRecordSymbol, origField.pos, SOURCE);
if (recordFieldType.tag == TypeTags.INVOKABLE && recordFieldType.tsymbol != null) {
BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) recordFieldType.tsymbol;
BInvokableSymbol invokableSymbol = (BInvokableSymbol) recordFieldSymbol;
invokableSymbol.params = tsymbol.params;
invokableSymbol.restParam = tsymbol.restParam;
invokableSymbol.retType = tsymbol.returnType;
invokableSymbol.flags = tsymbol.flags;
}
fields.put(nameString, new BField(origFieldName, null, recordFieldSymbol));
intersectionRecordSymbol.scope.define(origFieldName, recordFieldSymbol);
}
newType.fields.putAll(fields);
return true;
}
private boolean validateRecordFieldDefaultValueForIntersection(IntersectionContext diagnosticContext,
BField field, BRecordType recordType) {
if (field.symbol != null && field.symbol.isDefaultable && !diagnosticContext.compilerInternalIntersectionTest) {
diagnosticContext.logError(DiagnosticErrorCode.INTERSECTION_NOT_ALLOWED_WITH_TYPE, recordType, field.name);
return false;
}
return true;
}
private BType validateRecordField(IntersectionContext intersectionContext,
BRecordType newType, BField origField, BType constraint, SymbolEnv env) {
if (hasField(newType, origField)) {
return validateOverlappingFields(newType, origField);
}
if (constraint == null) {
return origField.type;
}
BType fieldType = getTypeIntersection(intersectionContext, origField.type, constraint, env);
if (fieldType != symTable.semanticError) {
return fieldType;
}
if (Symbols.isOptional(origField.symbol)) {
return null;
}
return symTable.semanticError;
}
private boolean hasField(BRecordType recordType, BField origField) {
return recordType.fields.containsKey(origField.name.value);
}
private BType validateOverlappingFields(BRecordType newType, BField origField) {
if (!hasField(newType, origField)) {
return origField.type;
}
BField overlappingField = newType.fields.get(origField.name.value);
if (isAssignable(overlappingField.type, origField.type)) {
return overlappingField.type;
}
if (isAssignable(origField.type, overlappingField.type)) {
return origField.type;
}
return symTable.semanticError;
}
private void removeErrorFromReadonlyType(List<BType> remainingTypes) {
Iterator<BType> remainingIterator = remainingTypes.listIterator();
boolean addAnyAndReadOnly = false;
while (remainingIterator.hasNext()) {
BType remainingType = remainingIterator.next();
if (remainingType.tag != TypeTags.READONLY) {
continue;
}
remainingIterator.remove();
addAnyAndReadOnly = true;
}
if (addAnyAndReadOnly) {
remainingTypes.add(symTable.anyAndReadonly);
}
}
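    /**
     * Computes the type remaining in a union after removing members assignable to the given types, as used in
     * type narrowing. Finite members are narrowed value by value, and when an error type is removed the
     * {@code readonly} member is replaced by {@code any & readonly}.
     */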
private BType getRemainingType(BUnionType originalType, List<BType> removeTypes) {
List<BType> remainingTypes = getAllTypes(originalType);
boolean hasErrorToRemove = false;
for (BType removeType : removeTypes) {
remainingTypes.removeIf(type -> isAssignable(type, removeType));
if (!hasErrorToRemove && removeType.tag == TypeTags.ERROR) {
hasErrorToRemove = true;
}
}
if (hasErrorToRemove) {
removeErrorFromReadonlyType(remainingTypes);
}
List<BType> finiteTypesToRemove = new ArrayList<>();
List<BType> finiteTypesToAdd = new ArrayList<>();
for (BType remainingType : remainingTypes) {
if (remainingType.tag == TypeTags.FINITE) {
BFiniteType finiteType = (BFiniteType) remainingType;
finiteTypesToRemove.add(finiteType);
BType remainingTypeWithMatchesRemoved = getRemainingType(finiteType, removeTypes);
if (remainingTypeWithMatchesRemoved != symTable.semanticError) {
finiteTypesToAdd.add(remainingTypeWithMatchesRemoved);
}
}
}
remainingTypes.removeAll(finiteTypesToRemove);
remainingTypes.addAll(finiteTypesToAdd);
if (remainingTypes.size() == 1) {
return remainingTypes.get(0);
}
if (remainingTypes.isEmpty()) {
return symTable.nullSet;
}
return BUnionType.create(null, new LinkedHashSet<>(remainingTypes));
}
private BType getRemainingType(BFiniteType originalType, List<BType> removeTypes) {
Set<BLangExpression> remainingValueSpace = new LinkedHashSet<>();
for (BLangExpression valueExpr : originalType.getValueSpace()) {
boolean matchExists = false;
for (BType remType : removeTypes) {
if (isAssignable(valueExpr.type, remType) ||
isAssignableToFiniteType(remType, (BLangLiteral) valueExpr)) {
matchExists = true;
break;
}
}
if (!matchExists) {
remainingValueSpace.add(valueExpr);
}
}
if (remainingValueSpace.isEmpty()) {
return symTable.semanticError;
}
BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, originalType.tsymbol.flags,
names.fromString("$anonType$" + UNDERSCORE + finiteTypeCount++),
originalType.tsymbol.pkgID, null,
originalType.tsymbol.owner, originalType.tsymbol.pos,
VIRTUAL);
BFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, remainingValueSpace);
finiteTypeSymbol.type = intersectingFiniteType;
return intersectingFiniteType;
}
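    /**
     * Lifts nil and/or error members from the given type: for {@code json}, {@code any}, {@code anydata} and
     * {@code readonly} a non-nullable copy is returned, and for unions the corresponding members are removed.
     */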
public BType getSafeType(BType type, boolean liftNil, boolean liftError) {
switch (type.tag) {
case TypeTags.JSON:
return new BJSONType((BJSONType) type, false);
case TypeTags.ANY:
return new BAnyType(type.tag, type.tsymbol, false);
case TypeTags.ANYDATA:
return new BAnydataType((BAnydataType) type, false);
case TypeTags.READONLY:
return new BReadonlyType(type.tag, type.tsymbol, false);
}
if (type.tag != TypeTags.UNION) {
return type;
}
BUnionType unionType = (BUnionType) type;
LinkedHashSet<BType> memTypes = new LinkedHashSet<>(unionType.getMemberTypes());
BUnionType errorLiftedType = BUnionType.create(null, memTypes);
if (liftNil) {
errorLiftedType.remove(symTable.nilType);
}
if (liftError) {
errorLiftedType.remove(symTable.errorType);
}
if (errorLiftedType.getMemberTypes().size() == 1) {
return errorLiftedType.getMemberTypes().toArray(new BType[0])[0];
}
return errorLiftedType;
}
public List<BType> getAllTypes(BType type) {
if (type.tag != TypeTags.UNION) {
return Lists.of(type);
}
List<BType> memberTypes = new ArrayList<>();
((BUnionType) type).getMemberTypes().forEach(memberType -> memberTypes.addAll(getAllTypes(memberType)));
return memberTypes;
}
public boolean isAllowedConstantType(BType type) {
switch (type.tag) {
case TypeTags.BOOLEAN:
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.STRING:
case TypeTags.NIL:
return true;
case TypeTags.MAP:
return isAllowedConstantType(((BMapType) type).constraint);
case TypeTags.FINITE:
BLangExpression finiteValue = ((BFiniteType) type).getValueSpace().toArray(new BLangExpression[0])[0];
return isAllowedConstantType(finiteValue.type);
default:
return false;
}
}
public boolean isValidLiteral(BLangLiteral literal, BType targetType) {
BType literalType = literal.type;
if (literalType.tag == targetType.tag) {
return true;
}
switch (targetType.tag) {
case TypeTags.BYTE:
return literalType.tag == TypeTags.INT && isByteLiteralValue((Long) literal.value);
case TypeTags.DECIMAL:
return literalType.tag == TypeTags.FLOAT || literalType.tag == TypeTags.INT;
case TypeTags.FLOAT:
return literalType.tag == TypeTags.INT;
case TypeTags.SIGNED32_INT:
return literalType.tag == TypeTags.INT && isSigned32LiteralValue((Long) literal.value);
case TypeTags.SIGNED16_INT:
return literalType.tag == TypeTags.INT && isSigned16LiteralValue((Long) literal.value);
case TypeTags.SIGNED8_INT:
return literalType.tag == TypeTags.INT && isSigned8LiteralValue((Long) literal.value);
case TypeTags.UNSIGNED32_INT:
return literalType.tag == TypeTags.INT && isUnsigned32LiteralValue((Long) literal.value);
case TypeTags.UNSIGNED16_INT:
return literalType.tag == TypeTags.INT && isUnsigned16LiteralValue((Long) literal.value);
case TypeTags.UNSIGNED8_INT:
return literalType.tag == TypeTags.INT && isUnsigned8LiteralValue((Long) literal.value);
case TypeTags.CHAR_STRING:
return literalType.tag == TypeTags.STRING && isCharLiteralValue((String) literal.value);
default:
return false;
}
}
/**
* Validate if the return type of the given function is a subtype of `error?`, containing `()`.
*
* @param function The function of which the return type should be validated
* @param diagnosticCode The code to log if the return type is invalid
*/
public void validateErrorOrNilReturn(BLangFunction function, DiagnosticCode diagnosticCode) {
BType returnType = function.returnTypeNode.type;
if (returnType.tag == TypeTags.NIL) {
return;
}
if (returnType.tag == TypeTags.UNION) {
Set<BType> memberTypes = ((BUnionType) returnType).getMemberTypes();
if (returnType.isNullable() &&
memberTypes.stream().allMatch(type -> type.tag == TypeTags.NIL || type.tag == TypeTags.ERROR)) {
return;
}
}
dlog.error(function.returnTypeNode.pos, diagnosticCode, function.returnTypeNode.type.toString());
}
/**
* Type vector of size two, to hold the source and the target types.
*
* @since 0.982.0
*/
private static class TypePair {
BType sourceType;
BType targetType;
public TypePair(BType sourceType, BType targetType) {
this.sourceType = sourceType;
this.targetType = targetType;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof TypePair)) {
return false;
}
TypePair other = (TypePair) obj;
return this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType);
}
@Override
public int hashCode() {
return Objects.hash(sourceType, targetType);
}
}
/**
* A functional interface for parameterizing the type of type checking that needs to be done on the source and
* target types.
*
* @since 0.995.0
*/
private interface TypeEqualityPredicate {
boolean test(BType source, BType target, Set<TypePair> unresolvedTypes);
}
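    /**
     * Checks whether the given type has an implicit filler value (e.g. {@code 0} for integer types, {@code ""}
     * for string, {@code false} for boolean, {@code ()} for nil). An illustrative Ballerina sketch of where
     * this matters (assumed usage, not part of this class): {@code int[3] a = [1];} is valid because the
     * remaining members can be filled with {@code 0}.
     */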
public boolean hasFillerValue(BType type) {
switch (type.tag) {
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.STRING:
case TypeTags.BOOLEAN:
case TypeTags.JSON:
case TypeTags.XML:
case TypeTags.NIL:
case TypeTags.TABLE:
case TypeTags.ANYDATA:
case TypeTags.MAP:
case TypeTags.ANY:
case TypeTags.NEVER:
return true;
case TypeTags.ARRAY:
return checkFillerValue((BArrayType) type);
case TypeTags.FINITE:
return checkFillerValue((BFiniteType) type);
case TypeTags.UNION:
return checkFillerValue((BUnionType) type);
case TypeTags.OBJECT:
return checkFillerValue((BObjectType) type);
case TypeTags.RECORD:
return checkFillerValue((BRecordType) type);
case TypeTags.TUPLE:
BTupleType tupleType = (BTupleType) type;
return tupleType.getTupleTypes().stream().allMatch(eleType -> hasFillerValue(eleType));
default:
if (TypeTags.isIntegerTypeTag(type.tag)) {
return true;
}
return false;
}
}
private boolean checkFillerValue(BObjectType type) {
if ((type.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
return false;
}
BAttachedFunction initFunction = ((BObjectTypeSymbol) type.tsymbol).initializerFunc;
if (initFunction == null) {
return true;
}
if (initFunction.symbol.getReturnType().getKind() != TypeKind.NIL) {
return false;
}
for (BVarSymbol bVarSymbol : initFunction.symbol.getParameters()) {
if (!bVarSymbol.isDefaultable) {
return false;
}
}
return true;
}
    /**
     * Checks whether a finite type has a filler value. Two cases are handled:
     * Singleton: a singleton has a filler value only if its single value is itself a valid filler value.
     * Union of values: if nil is a member, it is the filler value; otherwise all values must belong to the
     * same basic type and the implicit default value of that type must be a member of the value space.
     * Precondition: the value space must contain at least one element.
     *
     * @param type the finite type (or finite-valued union) to check
     * @return whether the type has a valid filler value
     */
private boolean checkFillerValue(BFiniteType type) {
if (type.isNullable()) {
return true;
}
if (type.getValueSpace().size() == 1) {
return true;
}
Iterator iterator = type.getValueSpace().iterator();
BLangExpression firstElement = (BLangExpression) iterator.next();
boolean defaultFillValuePresent = isImplicitDefaultValue(firstElement);
while (iterator.hasNext()) {
BLangExpression value = (BLangExpression) iterator.next();
if (!isSameBasicType(value.type, firstElement.type)) {
return false;
}
if (!defaultFillValuePresent && isImplicitDefaultValue(value)) {
defaultFillValuePresent = true;
}
}
return defaultFillValuePresent;
}
private boolean hasImplicitDefaultValue(Set<BLangExpression> valueSpace) {
for (BLangExpression expression : valueSpace) {
if (isImplicitDefaultValue(expression)) {
return true;
}
}
return false;
}
private boolean checkFillerValue(BUnionType type) {
if (type.isNullable()) {
return true;
}
Set<BType> memberTypes = new HashSet<>();
boolean hasFillerValue = false;
boolean defaultValuePresent = false;
boolean finiteTypePresent = false;
for (BType member : type.getMemberTypes()) {
if (member.tag == TypeTags.FINITE) {
Set<BType> uniqueValues = getValueTypes(((BFiniteType) member).getValueSpace());
memberTypes.addAll(uniqueValues);
if (!defaultValuePresent && hasImplicitDefaultValue(((BFiniteType) member).getValueSpace())) {
defaultValuePresent = true;
}
finiteTypePresent = true;
} else {
memberTypes.add(member);
}
if (!hasFillerValue && hasFillerValue(member)) {
hasFillerValue = true;
}
}
if (!hasFillerValue) {
return false;
}
Iterator<BType> iterator = memberTypes.iterator();
BType firstMember = iterator.next();
while (iterator.hasNext()) {
if (!isSameBasicType(firstMember, iterator.next())) {
return false;
}
}
if (finiteTypePresent) {
return defaultValuePresent;
}
return true;
}
private boolean isSameBasicType(BType source, BType target) {
if (isSameType(source, target)) {
return true;
}
if (TypeTags.isIntegerTypeTag(source.tag) && TypeTags.isIntegerTypeTag(target.tag)) {
return true;
}
return false;
}
private Set<BType> getValueTypes(Set<BLangExpression> valueSpace) {
Set<BType> uniqueType = new HashSet<>();
for (BLangExpression expression : valueSpace) {
uniqueType.add(expression.type);
}
return uniqueType;
}
private boolean isImplicitDefaultValue(BLangExpression expression) {
if ((expression.getKind() == NodeKind.LITERAL) || (expression.getKind() == NodeKind.NUMERIC_LITERAL)) {
BLangLiteral literalExpression = (BLangLiteral) expression;
BType literalExprType = literalExpression.type;
Object value = literalExpression.getValue();
switch (literalExprType.getKind()) {
case INT:
case BYTE:
return value.equals(Long.valueOf(0));
case STRING:
return value == null || value.equals("");
case DECIMAL:
case FLOAT:
return value.equals(String.valueOf(0.0));
case BOOLEAN:
return value.equals(Boolean.valueOf(false));
case NIL:
return true;
default:
return false;
}
}
return false;
}
private boolean checkFillerValue(BRecordType type) {
for (BField field : type.fields.values()) {
if (Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL)) {
continue;
}
if (Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
return false;
}
}
return true;
}
private boolean checkFillerValue(BArrayType type) {
if (type.size == -1) {
return true;
}
return hasFillerValue(type.eType);
}
/**
* Get result type of the query output.
*
* @param type type of query expression.
* @return result type.
*/
public BType resolveExprType(BType type) {
switch (type.tag) {
case TypeTags.STREAM:
return ((BStreamType) type).constraint;
case TypeTags.TABLE:
return ((BTableType) type).constraint;
case TypeTags.ARRAY:
return ((BArrayType) type).eType;
case TypeTags.UNION:
List<BType> exprTypes = new ArrayList<>(((BUnionType) type).getMemberTypes());
for (BType returnType : exprTypes) {
switch (returnType.tag) {
case TypeTags.STREAM:
return ((BStreamType) returnType).constraint;
case TypeTags.TABLE:
return ((BTableType) returnType).constraint;
case TypeTags.ARRAY:
return ((BArrayType) returnType).eType;
case TypeTags.STRING:
case TypeTags.XML:
return returnType;
}
}
default:
return type;
}
}
private boolean isSimpleBasicType(int tag) {
switch (tag) {
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.BOOLEAN:
case TypeTags.NIL:
return true;
default:
return (TypeTags.isIntegerTypeTag(tag)) || (TypeTags.isStringTypeTag(tag));
}
}
/**
* Check whether a type is an ordered type.
*
* @param type type.
* @param hasCycle whether there is a cycle.
* @return boolean whether the type is an ordered type or not.
*/
public boolean isOrderedType(BType type, boolean hasCycle) {
switch (type.tag) {
case TypeTags.UNION:
BUnionType unionType = (BUnionType) type;
if (hasCycle) {
return true;
}
if (unionType.isCyclic) {
hasCycle = true;
}
Set<BType> memberTypes = unionType.getMemberTypes();
boolean allMembersOrdered = false;
BType firstTypeInUnion = memberTypes.iterator().next();
for (BType memType : memberTypes) {
if (memType.tag == TypeTags.FINITE && firstTypeInUnion.tag == TypeTags.FINITE) {
Set<BLangExpression> valSpace = ((BFiniteType) firstTypeInUnion).getValueSpace();
BType baseExprType = valSpace.iterator().next().type;
if (!checkValueSpaceHasSameType((BFiniteType) memType, baseExprType)) {
return false;
}
} else if (memType.tag != firstTypeInUnion.tag && memType.tag != TypeTags.NIL &&
!isIntOrStringType(memType.tag, firstTypeInUnion.tag)) {
return false;
}
allMembersOrdered = isOrderedType(memType, hasCycle);
if (!allMembersOrdered) {
break;
}
}
return allMembersOrdered;
case TypeTags.ARRAY:
BType elementType = ((BArrayType) type).eType;
return isOrderedType(elementType, hasCycle);
case TypeTags.TUPLE:
List<BType> tupleMemberTypes = ((BTupleType) type).tupleTypes;
for (BType memType : tupleMemberTypes) {
if (!isOrderedType(memType, hasCycle)) {
return false;
}
}
BType restType = ((BTupleType) type).restType;
return restType == null || isOrderedType(restType, hasCycle);
case TypeTags.FINITE:
boolean isValueSpaceOrdered = false;
Set<BLangExpression> valSpace = ((BFiniteType) type).getValueSpace();
BType baseExprType = valSpace.iterator().next().type;
for (BLangExpression expr : valSpace) {
if (!checkValueSpaceHasSameType((BFiniteType) type, baseExprType)) {
return false;
}
isValueSpaceOrdered = isOrderedType(expr.type, hasCycle);
if (!isValueSpaceOrdered) {
break;
}
}
return isValueSpaceOrdered;
default:
return isSimpleBasicType(type.tag);
}
}
private boolean isIntOrStringType(int firstTypeTag, int secondTypeTag) {
return ((TypeTags.isIntegerTypeTag(firstTypeTag)) && (TypeTags.isIntegerTypeTag(secondTypeTag))) ||
((TypeTags.isStringTypeTag(firstTypeTag)) && (TypeTags.isStringTypeTag(secondTypeTag)));
}
public boolean isUnionOfSimpleBasicTypes(BType type) {
if (type.tag == TypeTags.UNION) {
Set<BType> memberTypes = ((BUnionType) type).getMemberTypes();
for (BType memType : memberTypes) {
if (!isSimpleBasicType(memType.tag)) {
return false;
}
}
return true;
}
return isSimpleBasicType(type.tag);
}
public boolean isSubTypeOfReadOnlyOrIsolatedObjectUnion(BType type) {
if (isInherentlyImmutableType(type) || Symbols.isFlagOn(type.flags, Flags.READONLY)) {
return true;
}
int tag = type.tag;
if (tag == TypeTags.OBJECT) {
return isIsolated(type);
}
if (tag != TypeTags.UNION) {
return false;
}
for (BType memberType : ((BUnionType) type).getMemberTypes()) {
if (!isSubTypeOfReadOnlyOrIsolatedObjectUnion(memberType)) {
return false;
}
}
return true;
}
private boolean isIsolated(BType type) {
return Symbols.isFlagOn(type.flags, Flags.ISOLATED);
}
BType getTypeWithoutNil(BType type) {
if (type.tag != TypeTags.UNION) {
return type;
}
BUnionType unionType = (BUnionType) type;
if (!unionType.isNullable()) {
return unionType;
}
List<BType> nonNilTypes = new ArrayList<>();
for (BType memberType : unionType.getMemberTypes()) {
if (!isAssignable(memberType, symTable.nilType)) {
nonNilTypes.add(memberType);
}
}
if (nonNilTypes.size() == 1) {
return nonNilTypes.get(0);
}
return BUnionType.create(null, new LinkedHashSet<>(nonNilTypes));
}
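    /**
     * Checks whether the type is {@code never}, or a record/tuple that has a required member whose type
     * itself reduces to {@code never}, making values of the type impossible to construct.
     */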
boolean isNeverTypeOrStructureTypeWithARequiredNeverMember(BType type) {
switch (type.tag) {
case TypeTags.NEVER:
return true;
case TypeTags.RECORD:
for (BField field : ((BRecordType) type).fields.values()) {
if (!isSameType(type, field.type) && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED) &&
isNeverTypeOrStructureTypeWithARequiredNeverMember(field.type)) {
return true;
}
}
return false;
case TypeTags.TUPLE:
BTupleType tupleType = (BTupleType) type;
List<BType> tupleTypes = tupleType.tupleTypes;
for (BType mem : tupleTypes) {
if (isNeverTypeOrStructureTypeWithARequiredNeverMember(mem)) {
return true;
}
}
return false;
default:
return false;
}
}
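    /**
     * Helper used to validate that an object type satisfies the listener contract, i.e. provides public
     * {@code attach}, {@code detach}, {@code start}, {@code gracefulStop} and {@code immediateStop} methods
     * with the expected parameters and {@code error?}-compatible return types.
     */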
private static class ListenerValidationModel {
private final Types types;
private final SymbolTable symtable;
private final BType serviceNameType;
boolean attachFound;
boolean detachFound;
boolean startFound;
boolean gracefulStopFound;
boolean immediateStopFound;
public ListenerValidationModel(Types types, SymbolTable symTable) {
this.types = types;
this.symtable = symTable;
this.serviceNameType =
BUnionType.create(null, symtable.stringType, symtable.arrayStringType, symtable.nilType);
}
boolean isValidListener() {
return attachFound && detachFound && startFound && gracefulStopFound && immediateStopFound;
}
private boolean checkMethods(List<BAttachedFunction> rhsFuncs) {
for (BAttachedFunction func : rhsFuncs) {
switch (func.funcName.value) {
case "attach":
if (!checkAttachMethod(func)) {
return false;
}
break;
case "detach":
if (!checkDetachMethod(func)) {
return false;
}
break;
case "start":
if (!checkStartMethod(func)) {
return true;
}
break;
case "gracefulStop":
if (!checkGracefulStop(func)) {
return false;
}
break;
case "immediateStop":
if (!checkImmediateStop(func)) {
return false;
}
break;
}
}
return isValidListener();
}
private boolean emptyParamList(BAttachedFunction func) {
return func.type.paramTypes.isEmpty() && func.type.restType != symtable.noType;
}
private boolean publicAndReturnsErrorOrNil(BAttachedFunction func) {
if (!Symbols.isPublic(func.symbol)) {
return false;
}
return types.isAssignable(func.type.retType, symtable.errorOrNilType);
}
private boolean isPublicNoParamReturnsErrorOrNil(BAttachedFunction func) {
if (!publicAndReturnsErrorOrNil(func)) {
return false;
}
return emptyParamList(func);
}
private boolean checkImmediateStop(BAttachedFunction func) {
return immediateStopFound = isPublicNoParamReturnsErrorOrNil(func);
}
private boolean checkGracefulStop(BAttachedFunction func) {
return gracefulStopFound = isPublicNoParamReturnsErrorOrNil(func);
}
private boolean checkStartMethod(BAttachedFunction func) {
return startFound = publicAndReturnsErrorOrNil(func);
}
private boolean checkDetachMethod(BAttachedFunction func) {
if (!publicAndReturnsErrorOrNil(func)) {
return false;
}
if (func.type.paramTypes.size() != 1) {
return false;
}
return detachFound = isServiceObject(func.type.paramTypes.get(0));
}
private boolean checkAttachMethod(BAttachedFunction func) {
if (!publicAndReturnsErrorOrNil(func)) {
return false;
}
if (func.type.paramTypes.size() != 2) {
return false;
}
BType firstParamType = func.type.paramTypes.get(0);
if (!isServiceObject(firstParamType)) {
return false;
}
BType secondParamType = func.type.paramTypes.get(1);
boolean sameType = types.isAssignable(secondParamType, this.serviceNameType);
return attachFound = sameType;
}
private boolean isServiceObject(BType type) {
if (type.tag == TypeTags.UNION) {
for (BType memberType : ((BUnionType) type).getMemberTypes()) {
if (!isServiceObject(memberType)) {
return false;
}
}
return true;
}
if (type.tag != TypeTags.OBJECT) {
return false;
}
return Symbols.isService(type.tsymbol);
}
}
/**
* Intersection type validation helper.
*
* @since 2.0.0
*/
public static class IntersectionContext {
Location lhsPos;
Location rhsPos;
BLangDiagnosticLog dlog;
ContextOption contextOption;
boolean compilerInternalIntersectionTest;
boolean preferNonGenerativeIntersection;
private IntersectionContext(BLangDiagnosticLog diaglog, Location left, Location right) {
this.dlog = diaglog;
this.lhsPos = left;
this.rhsPos = right;
this.contextOption = ContextOption.NON;
this.compilerInternalIntersectionTest = false;
this.preferNonGenerativeIntersection = false;
}
        /**
         * Create an {@link IntersectionContext} used for calculating the intersection type when the user
         * explicitly writes an intersection type. This produces error messages explaining why there is no
         * intersection between two types.
         *
         * @return an {@link IntersectionContext}
         */
public static IntersectionContext from(BLangDiagnosticLog diaglog, Location left, Location right) {
return new IntersectionContext(diaglog, left, right);
}
        /**
         * Create an {@link IntersectionContext} used for calculating the intersection type to see whether an
         * intersection exists between the types. This does not emit error messages explaining why there is no
         * intersection between two types, and it does not generate a type definition for the calculated
         * intersection type. Do not use this context to create an intersection type whose calculated type is
         * used for any purpose other than checking whether an intersection exists.
         *
         * @return an {@link IntersectionContext}
         */
public static IntersectionContext compilerInternalIntersectionTestContext() {
IntersectionContext diagnosticContext = new IntersectionContext(null, null, null);
diagnosticContext.compilerInternalIntersectionTest = true;
return diagnosticContext;
}
        /**
         * Create an {@link IntersectionContext} used for calculating the intersection type.
         * This does not emit error messages explaining why there is no intersection between two types.
         *
         * @return an {@link IntersectionContext}
         */
public static IntersectionContext compilerInternalIntersectionContext() {
IntersectionContext diagnosticContext = new IntersectionContext(null, null, null);
return diagnosticContext;
}
        /**
         * Create an {@link IntersectionContext} used for calculating the intersection type that tries not to
         * generate new types when possible.
         * This preserves the previous type-narrowing semantics of intersection calculation.
         * This does not emit error messages explaining why there is no intersection between two types.
         *
         * @return an {@link IntersectionContext}
         */
public static IntersectionContext compilerInternalNonGenerativeIntersectionContext() {
IntersectionContext diagnosticContext = new IntersectionContext(null, null, null);
diagnosticContext.preferNonGenerativeIntersection = true;
return diagnosticContext;
}
public IntersectionContext switchLeft() {
this.contextOption = ContextOption.LEFT;
return this;
}
public IntersectionContext switchRight() {
this.contextOption = ContextOption.RIGHT;
return this;
}
private boolean logError(DiagnosticErrorCode diagnosticCode, Object... args) {
Location pos = null;
if (contextOption == ContextOption.LEFT && lhsPos != null) {
pos = lhsPos;
} else if (contextOption == ContextOption.RIGHT && rhsPos != null) {
pos = rhsPos;
}
if (pos != null) {
dlog.error(pos, diagnosticCode, args);
return true;
}
return false;
}
}
private enum ContextOption {
LEFT, RIGHT, NON;
}
} | class Types {
private static final CompilerContext.Key<Types> TYPES_KEY =
new CompilerContext.Key<>();
private final Unifier unifier;
private SymbolTable symTable;
private SymbolResolver symResolver;
private BLangDiagnosticLog dlog;
private Names names;
private int finiteTypeCount = 0;
private BUnionType expandedXMLBuiltinSubtypes;
private final BLangAnonymousModelHelper anonymousModelHelper;
private int recordCount = 0;
private SymbolEnv env;
private boolean inOrderedType;
public static Types getInstance(CompilerContext context) {
Types types = context.get(TYPES_KEY);
if (types == null) {
types = new Types(context);
}
return types;
}
public Types(CompilerContext context) {
context.put(TYPES_KEY, this);
this.symTable = SymbolTable.getInstance(context);
this.symResolver = SymbolResolver.getInstance(context);
this.dlog = BLangDiagnosticLog.getInstance(context);
this.names = Names.getInstance(context);
this.expandedXMLBuiltinSubtypes = BUnionType.create(null,
symTable.xmlElementType, symTable.xmlCommentType,
symTable.xmlPIType, symTable.xmlTextType);
this.unifier = new Unifier();
this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
}
public List<BType> checkTypes(BLangExpression node,
List<BType> actualTypes,
List<BType> expTypes) {
List<BType> resTypes = new ArrayList<>();
for (int i = 0; i < actualTypes.size(); i++) {
resTypes.add(checkType(node, actualTypes.get(i), expTypes.size() > i ? expTypes.get(i) : symTable.noType));
}
return resTypes;
}
public BType checkType(BLangExpression node,
BType actualType,
BType expType) {
return checkType(node, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
public BType checkType(BLangExpression expr,
BType actualType,
BType expType,
DiagnosticCode diagCode) {
expr.type = checkType(expr.pos, actualType, expType, diagCode);
if (expr.type.tag == TypeTags.SEMANTIC_ERROR) {
return expr.type;
}
setImplicitCastExpr(expr, actualType, expType);
return expr.type;
}
public BType checkType(Location pos,
BType actualType,
BType expType,
DiagnosticCode diagCode) {
if (expType.tag == TypeTags.SEMANTIC_ERROR) {
return expType;
} else if (expType.tag == TypeTags.NONE) {
return actualType;
} else if (actualType.tag == TypeTags.SEMANTIC_ERROR) {
return actualType;
} else if (isAssignable(actualType, expType)) {
return actualType;
}
dlog.error(pos, diagCode, expType, actualType);
return symTable.semanticError;
}
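    /**
     * Checks whether the given type is a lax type: {@code json}, {@code xml}, {@code xml:Element}, or a map
     * (or union) whose members are themselves lax.
     */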
public boolean isLax(BType type) {
Set<BType> visited = new HashSet<>();
int result = isLaxType(type, visited);
if (result == 1) {
return true;
}
return false;
}
public int isLaxType(BType type, Set<BType> visited) {
if (!visited.add(type)) {
return -1;
}
switch (type.tag) {
case TypeTags.JSON:
case TypeTags.XML:
case TypeTags.XML_ELEMENT:
return 1;
case TypeTags.MAP:
return isLaxType(((BMapType) type).constraint, visited);
case TypeTags.UNION:
if (isSameType(type, symTable.jsonType)) {
visited.add(type);
return 1;
}
boolean atleastOneLaxType = false;
for (BType member : ((BUnionType) type).getMemberTypes()) {
int result = isLaxType(member, visited);
if (result == -1) {
continue;
}
if (result == 0) {
return 0;
}
atleastOneLaxType = true;
}
return atleastOneLaxType ? 1 : 0;
}
return 0;
}
public boolean isLaxType(BType type, Map<BType, Boolean> visited) {
if (visited.containsKey(type)) {
return visited.get(type);
}
switch (type.tag) {
case TypeTags.JSON:
case TypeTags.XML:
case TypeTags.XML_ELEMENT:
visited.put(type, true);
return true;
case TypeTags.MAP:
boolean result = isLaxType(((BMapType) type).constraint, visited);
visited.put(type, result);
return result;
case TypeTags.UNION:
if (type == symTable.jsonType || isSameType(type, symTable.jsonType)) {
visited.put(type, true);
return true;
}
for (BType member : ((BUnionType) type).getMemberTypes()) {
if (!isLaxType(member, visited)) {
visited.put(type, false);
return false;
}
}
visited.put(type, true);
return true;
}
visited.put(type, false);
return false;
}
public boolean isSameType(BType source, BType target) {
return isSameType(source, target, new HashSet<>());
}
public boolean isSameOrderedType(BType source, BType target) {
this.inOrderedType = true;
return isSameType(source, target);
}
public boolean isPureType(BType type) {
IsPureTypeUniqueVisitor visitor = new IsPureTypeUniqueVisitor();
return visitor.visit(type);
}
public boolean isAnydata(BType type) {
IsAnydataUniqueVisitor visitor = new IsAnydataUniqueVisitor();
return visitor.visit(type);
}
private boolean isSameType(BType source, BType target, Set<TypePair> unresolvedTypes) {
TypePair pair = new TypePair(source, target);
if (unresolvedTypes.contains(pair)) {
return true;
}
unresolvedTypes.add(pair);
BTypeVisitor<BType, Boolean> sameTypeVisitor = new BSameTypeVisitor(unresolvedTypes);
return target.accept(sameTypeVisitor, source);
}
public boolean isValueType(BType type) {
switch (type.tag) {
case TypeTags.BOOLEAN:
case TypeTags.BYTE:
case TypeTags.DECIMAL:
case TypeTags.FLOAT:
case TypeTags.INT:
case TypeTags.STRING:
case TypeTags.SIGNED32_INT:
case TypeTags.SIGNED16_INT:
case TypeTags.SIGNED8_INT:
case TypeTags.UNSIGNED32_INT:
case TypeTags.UNSIGNED16_INT:
case TypeTags.UNSIGNED8_INT:
case TypeTags.CHAR_STRING:
return true;
default:
return false;
}
}
boolean isBasicNumericType(BType type) {
return type.tag < TypeTags.STRING || TypeTags.isIntegerTypeTag(type.tag);
}
boolean finiteTypeContainsNumericTypeValues(BFiniteType finiteType) {
return finiteType.getValueSpace().stream().anyMatch(valueExpr -> isBasicNumericType(valueExpr.type));
}
public boolean containsErrorType(BType type) {
if (type.tag == TypeTags.UNION) {
return ((BUnionType) type).getMemberTypes().stream()
.anyMatch(this::containsErrorType);
}
if (type.tag == TypeTags.READONLY) {
return true;
}
return type.tag == TypeTags.ERROR;
}
public boolean isSubTypeOfList(BType type) {
if (type.tag != TypeTags.UNION) {
return isSubTypeOfBaseType(type, TypeTags.ARRAY) || isSubTypeOfBaseType(type, TypeTags.TUPLE);
}
return ((BUnionType) type).getMemberTypes().stream().allMatch(this::isSubTypeOfList);
}
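    /**
     * Resolves the static type of an error binding pattern against the match expression's type by taking the
     * intersection of the two; {@code noType} is returned when there is no intersection.
     */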
BType resolvePatternTypeFromMatchExpr(BLangErrorBindingPattern errorBindingPattern, BLangExpression matchExpr,
SymbolEnv env) {
if (matchExpr == null) {
return errorBindingPattern.type;
}
BType intersectionType = getTypeIntersection(
IntersectionContext.compilerInternalIntersectionContext(),
matchExpr.type, errorBindingPattern.type, env);
if (intersectionType == symTable.semanticError) {
return symTable.noType;
}
return intersectionType;
}
public BType resolvePatternTypeFromMatchExpr(BLangListBindingPattern listBindingPattern,
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern,
SymbolEnv env) {
BTupleType listBindingPatternType = (BTupleType) listBindingPattern.type;
if (varBindingPatternMatchPattern.matchExpr == null) {
return listBindingPatternType;
}
BType matchExprType = varBindingPatternMatchPattern.matchExpr.type;
BType intersectionType = getTypeIntersection(
IntersectionContext.compilerInternalIntersectionContext(),
matchExprType, listBindingPatternType, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
if (matchExprType.tag == TypeTags.ANYDATA) {
Collections.fill(listBindingPatternType.tupleTypes, symTable.anydataType);
if (listBindingPatternType.restType != null) {
listBindingPatternType.restType = symTable.anydataType;
}
return listBindingPatternType;
}
return symTable.noType;
}
public BType resolvePatternTypeFromMatchExpr(BLangListMatchPattern listMatchPattern,
BTupleType listMatchPatternType, SymbolEnv env) {
if (listMatchPattern.matchExpr == null) {
return listMatchPatternType;
}
BType matchExprType = listMatchPattern.matchExpr.type;
BType intersectionType = getTypeIntersection(
IntersectionContext.compilerInternalIntersectionContext(),
matchExprType, listMatchPatternType, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
if (matchExprType.tag == TypeTags.ANYDATA) {
Collections.fill(listMatchPatternType.tupleTypes, symTable.anydataType);
if (listMatchPatternType.restType != null) {
listMatchPatternType.restType = symTable.anydataType;
}
return listMatchPatternType;
}
return symTable.noType;
}
BType resolvePatternTypeFromMatchExpr(BLangErrorMatchPattern errorMatchPattern, BLangExpression matchExpr) {
if (matchExpr == null) {
return errorMatchPattern.type;
}
BType matchExprType = matchExpr.type;
BType patternType = errorMatchPattern.type;
if (isAssignable(matchExprType, patternType)) {
return matchExprType;
}
if (isAssignable(patternType, matchExprType)) {
return patternType;
}
return symTable.noType;
}
BType resolvePatternTypeFromMatchExpr(BLangConstPattern constPattern, BLangExpression constPatternExpr) {
if (constPattern.matchExpr == null) {
if (constPatternExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
return ((BLangSimpleVarRef) constPatternExpr).symbol.type;
} else {
return constPatternExpr.type;
}
}
BType matchExprType = constPattern.matchExpr.type;
BType constMatchPatternExprType = constPatternExpr.type;
if (constPatternExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangSimpleVarRef constVarRef = (BLangSimpleVarRef) constPatternExpr;
BType constVarRefSymbolType = constVarRef.symbol.type;
if (isAssignable(constVarRefSymbolType, matchExprType)) {
return constVarRefSymbolType;
}
return symTable.noType;
}
BLangLiteral constPatternLiteral = (BLangLiteral) constPatternExpr;
if (containsAnyType(constMatchPatternExprType)) {
return matchExprType;
} else if (containsAnyType(matchExprType)) {
return constMatchPatternExprType;
}
if (matchExprType.tag == TypeTags.BYTE && constMatchPatternExprType.tag == TypeTags.INT) {
return matchExprType;
}
if (isAssignable(constMatchPatternExprType, matchExprType)) {
return constMatchPatternExprType;
}
if (matchExprType.tag == TypeTags.UNION) {
for (BType memberType : ((BUnionType) matchExprType).getMemberTypes()) {
if (memberType.tag == TypeTags.FINITE) {
if (isAssignableToFiniteType(memberType, constPatternLiteral)) {
return memberType;
}
} else {
if (isAssignable(constMatchPatternExprType, matchExprType)) {
return constMatchPatternExprType;
}
}
}
} else if (matchExprType.tag == TypeTags.FINITE) {
if (isAssignableToFiniteType(matchExprType, constPatternLiteral)) {
return matchExprType;
}
}
return symTable.noType;
}
BType resolvePatternTypeFromMatchExpr(BLangMappingMatchPattern mappingMatchPattern, BType patternType,
SymbolEnv env) {
if (mappingMatchPattern.matchExpr == null) {
return patternType;
}
BType intersectionType = getTypeIntersection(
IntersectionContext.compilerInternalIntersectionContext(),
mappingMatchPattern.matchExpr.type, patternType, env);
if (intersectionType == symTable.semanticError) {
return symTable.noType;
}
return intersectionType;
}
public BType resolvePatternTypeFromMatchExpr(BLangMappingBindingPattern mappingBindingPattern,
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern,
SymbolEnv env) {
BRecordType mappingBindingPatternType = (BRecordType) mappingBindingPattern.type;
if (varBindingPatternMatchPattern.matchExpr == null) {
return mappingBindingPatternType;
}
BType intersectionType = getTypeIntersection(
IntersectionContext.compilerInternalIntersectionContext(),
varBindingPatternMatchPattern.matchExpr.type,
mappingBindingPatternType, env);
if (intersectionType == symTable.semanticError) {
return symTable.noType;
}
return intersectionType;
}
private boolean containsAnyType(BType type) {
if (type.tag != TypeTags.UNION) {
return type.tag == TypeTags.ANY;
}
for (BType memberTypes : ((BUnionType) type).getMemberTypes()) {
if (memberTypes.tag == TypeTags.ANY) {
return true;
}
}
return false;
}
private boolean containsAnyDataType(BType type) {
if (type.tag != TypeTags.UNION) {
return type.tag == TypeTags.ANYDATA;
}
for (BType memberTypes : ((BUnionType) type).getMemberTypes()) {
if (memberTypes.tag == TypeTags.ANYDATA) {
return true;
}
}
return false;
}
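    /**
     * Merges two types, preferring the more specific side when the other is {@code any} or {@code anydata}
     * (and the specific side has no error component); if both sides have the same basic type the first is
     * returned, and otherwise the union of the two is returned.
     */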
BType mergeTypes(BType typeFirst, BType typeSecond) {
if (containsAnyType(typeFirst) && !containsErrorType(typeSecond)) {
return typeSecond;
}
if (containsAnyType(typeSecond) && !containsErrorType(typeFirst)) {
return typeFirst;
}
if (containsAnyDataType(typeFirst) && !containsErrorType(typeSecond)) {
return typeSecond;
}
if (containsAnyDataType(typeSecond) && !containsErrorType(typeFirst)) {
return typeFirst;
}
if (isSameBasicType(typeFirst, typeSecond)) {
return typeFirst;
}
return BUnionType.create(null, typeFirst, typeSecond);
}
public boolean isSubTypeOfMapping(BType type) {
if (type.tag != TypeTags.UNION) {
return isSubTypeOfBaseType(type, TypeTags.MAP) || isSubTypeOfBaseType(type, TypeTags.RECORD);
}
return ((BUnionType) type).getMemberTypes().stream().allMatch(this::isSubTypeOfMapping);
}
public boolean isSubTypeOfBaseType(BType type, int baseTypeTag) {
if (type.tag != TypeTags.UNION) {
return type.tag == baseTypeTag || (baseTypeTag == TypeTags.TUPLE && type.tag == TypeTags.ARRAY)
|| (baseTypeTag == TypeTags.ARRAY && type.tag == TypeTags.TUPLE);
}
if (TypeTags.isXMLTypeTag(baseTypeTag)) {
return true;
}
return isUnionMemberTypesSubTypeOfBaseType(((BUnionType) type).getMemberTypes(), baseTypeTag);
}
private boolean isUnionMemberTypesSubTypeOfBaseType(LinkedHashSet<BType> memberTypes, int baseTypeTag) {
for (BType type : memberTypes) {
if (!isSubTypeOfBaseType(type, baseTypeTag)) {
return false;
}
}
return true;
}
/**
* Checks whether source type is assignable to the target type.
* <p>
* Source type is assignable to the target type if,
* 1) the target type is any and the source type is not a value type.
* 2) there exists an implicit cast symbol from source to target.
* 3) both types are JSON and the target constraint is no type.
* 4) both types are array type and both array types are assignable.
* 5) both types are MAP and the target constraint is any type or constraints are structurally equivalent.
*
* @param source type.
* @param target type.
* @return true if source type is assignable to the target type.
*/
public boolean isAssignable(BType source, BType target) {
return isAssignable(source, target, new HashSet<>());
}
private boolean isAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
if (isSameType(source, target)) {
return true;
}
int sourceTag = source.tag;
int targetTag = target.tag;
if (!Symbols.isFlagOn(source.flags, Flags.PARAMETERIZED) &&
!isInherentlyImmutableType(target) && Symbols.isFlagOn(target.flags, Flags.READONLY) &&
!isInherentlyImmutableType(source) && isMutable(source)) {
return false;
}
if (sourceTag == TypeTags.INTERSECTION) {
return isAssignable(((BIntersectionType) source).effectiveType,
targetTag != TypeTags.INTERSECTION ? target :
((BIntersectionType) target).effectiveType, unresolvedTypes);
}
if (targetTag == TypeTags.INTERSECTION) {
return isAssignable(source, ((BIntersectionType) target).effectiveType, unresolvedTypes);
}
if (sourceTag == TypeTags.PARAMETERIZED_TYPE) {
return isParameterizedTypeAssignable(source, target, unresolvedTypes);
}
if (sourceTag == TypeTags.BYTE && targetTag == TypeTags.INT) {
return true;
}
if (TypeTags.isXMLTypeTag(sourceTag) && TypeTags.isXMLTypeTag(targetTag)) {
return isXMLTypeAssignable(source, target, unresolvedTypes);
}
if (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.STRING) {
return true;
}
if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ERROR) {
return isErrorTypeAssignable((BErrorType) source, (BErrorType) target, unresolvedTypes);
} else if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ANY) {
return false;
}
if (sourceTag == TypeTags.NIL && (isNullable(target) || targetTag == TypeTags.JSON)) {
return true;
}
if (targetTag == TypeTags.ANY && !containsErrorType(source) && !isValueType(source)) {
return true;
}
if (targetTag == TypeTags.ANYDATA && !containsErrorType(source) && isAnydata(source)) {
return true;
}
if (targetTag == TypeTags.READONLY) {
if ((isInherentlyImmutableType(source) || Symbols.isFlagOn(source.flags, Flags.READONLY))) {
return true;
}
if (isAssignable(source, symTable.anyAndReadonlyOrError, unresolvedTypes)) {
return true;
}
}
if (sourceTag == TypeTags.READONLY && isAssignable(symTable.anyAndReadonlyOrError, target, unresolvedTypes)) {
return true;
}
if (targetTag == TypeTags.MAP && sourceTag == TypeTags.RECORD) {
BRecordType recordType = (BRecordType) source;
return isAssignableRecordType(recordType, target, unresolvedTypes);
}
if (targetTag == TypeTags.RECORD && sourceTag == TypeTags.MAP) {
return isAssignableMapType((BMapType) source, (BRecordType) target);
}
if (targetTag == TypeTags.TYPEDESC && sourceTag == TypeTags.TYPEDESC) {
return isAssignable(((BTypedescType) source).constraint, (((BTypedescType) target).constraint),
unresolvedTypes);
}
if (targetTag == TypeTags.TABLE && sourceTag == TypeTags.TABLE) {
return isAssignableTableType((BTableType) source, (BTableType) target, unresolvedTypes);
}
if (targetTag == TypeTags.STREAM && sourceTag == TypeTags.STREAM) {
return isAssignableStreamType((BStreamType) source, (BStreamType) target, unresolvedTypes);
}
if (isBuiltInTypeWidenPossible(source, target) == TypeTestResult.TRUE) {
return true;
}
if (sourceTag == TypeTags.FINITE) {
return isFiniteTypeAssignable((BFiniteType) source, target, unresolvedTypes);
}
if ((targetTag == TypeTags.UNION || sourceTag == TypeTags.UNION) &&
isAssignableToUnionType(source, target, unresolvedTypes)) {
return true;
}
if (targetTag == TypeTags.JSON) {
if (sourceTag == TypeTags.JSON) {
return true;
}
if (sourceTag == TypeTags.ARRAY) {
return isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes);
}
if (sourceTag == TypeTags.MAP) {
return isAssignable(((BMapType) source).constraint, target, unresolvedTypes);
}
if (sourceTag == TypeTags.RECORD) {
return isAssignableRecordType((BRecordType) source, target, unresolvedTypes);
}
}
if (targetTag == TypeTags.FUTURE && sourceTag == TypeTags.FUTURE) {
if (((BFutureType) target).constraint.tag == TypeTags.NONE) {
return true;
}
return isAssignable(((BFutureType) source).constraint, ((BFutureType) target).constraint, unresolvedTypes);
}
if (targetTag == TypeTags.MAP && sourceTag == TypeTags.MAP) {
if (((BMapType) target).constraint.tag == TypeTags.ANY &&
((BMapType) source).constraint.tag != TypeTags.UNION) {
return true;
}
return isAssignable(((BMapType) source).constraint, ((BMapType) target).constraint, unresolvedTypes);
}
if ((sourceTag == TypeTags.OBJECT || sourceTag == TypeTags.RECORD)
&& (targetTag == TypeTags.OBJECT || targetTag == TypeTags.RECORD)) {
return checkStructEquivalency(source, target, unresolvedTypes);
}
if (sourceTag == TypeTags.TUPLE && targetTag == TypeTags.ARRAY) {
return isTupleTypeAssignableToArrayType((BTupleType) source, (BArrayType) target, unresolvedTypes);
}
if (sourceTag == TypeTags.ARRAY && targetTag == TypeTags.TUPLE) {
return isArrayTypeAssignableToTupleType((BArrayType) source, (BTupleType) target, unresolvedTypes);
}
if (sourceTag == TypeTags.TUPLE || targetTag == TypeTags.TUPLE) {
return isTupleTypeAssignable(source, target, unresolvedTypes);
}
if (sourceTag == TypeTags.INVOKABLE && targetTag == TypeTags.INVOKABLE) {
return isFunctionTypeAssignable((BInvokableType) source, (BInvokableType) target, new HashSet<>());
}
return sourceTag == TypeTags.ARRAY && targetTag == TypeTags.ARRAY &&
isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes);
}
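    /**
     * Checks whether the given type is mutable. A union is considered immutable only when every member is
     * readonly, in which case the readonly flag is also cached on the union and its symbol.
     */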
private boolean isMutable(BType type) {
if (Symbols.isFlagOn(type.flags, Flags.READONLY)) {
return false;
}
if (type.tag != TypeTags.UNION) {
return true;
}
BUnionType unionType = (BUnionType) type;
for (BType memberType : unionType.getMemberTypes()) {
if (!Symbols.isFlagOn(memberType.flags, Flags.READONLY)) {
return true;
}
}
unionType.flags |= Flags.READONLY;
BTypeSymbol tsymbol = unionType.tsymbol;
if (tsymbol != null) {
tsymbol.flags |= Flags.READONLY;
}
return false;
}
private boolean isParameterizedTypeAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
BType resolvedSourceType = unifier.build(source);
if (target.tag != TypeTags.PARAMETERIZED_TYPE) {
return isAssignable(resolvedSourceType, target, unresolvedTypes);
}
if (((BParameterizedType) source).paramIndex != ((BParameterizedType) target).paramIndex) {
return false;
}
return isAssignable(resolvedSourceType, unifier.build(target), unresolvedTypes);
}
private boolean isAssignableRecordType(BRecordType recordType, BType type, Set<TypePair> unresolvedTypes) {
TypePair pair = new TypePair(recordType, type);
if (!unresolvedTypes.add(pair)) {
return true;
}
BType targetType;
switch (type.tag) {
case TypeTags.MAP:
targetType = ((BMapType) type).constraint;
break;
case TypeTags.JSON:
targetType = type;
break;
default:
throw new IllegalArgumentException("Incompatible target type: " + type.toString());
}
return recordFieldsAssignableToType(recordType, targetType, unresolvedTypes);
}
private boolean isAssignableStreamType(BStreamType sourceStreamType, BStreamType targetStreamType,
Set<TypePair> unresolvedTypes) {
return isAssignable(sourceStreamType.constraint, targetStreamType.constraint, unresolvedTypes)
&& isAssignable(sourceStreamType.error, targetStreamType.error, unresolvedTypes);
}
private boolean recordFieldsAssignableToType(BRecordType recordType, BType targetType,
Set<TypePair> unresolvedTypes) {
for (BField field : recordType.fields.values()) {
if (!isAssignable(field.type, targetType, unresolvedTypes)) {
return false;
}
}
if (!recordType.sealed) {
return isAssignable(recordType.restFieldType, targetType, unresolvedTypes);
}
return true;
}
private boolean isAssignableTableType(BTableType sourceTableType, BTableType targetTableType,
Set<TypePair> unresolvedTypes) {
if (!isAssignable(sourceTableType.constraint, targetTableType.constraint, unresolvedTypes)) {
return false;
}
if (targetTableType.keyTypeConstraint == null && targetTableType.fieldNameList == null) {
return true;
}
if (targetTableType.keyTypeConstraint != null) {
if (sourceTableType.keyTypeConstraint != null &&
(isAssignable(sourceTableType.keyTypeConstraint, targetTableType.keyTypeConstraint,
unresolvedTypes))) {
return true;
}
if (sourceTableType.fieldNameList == null) {
return false;
}
List<BType> fieldTypes = new ArrayList<>();
sourceTableType.fieldNameList.forEach(field -> fieldTypes
.add(getTableConstraintField(sourceTableType.constraint, field).type));
if (fieldTypes.size() == 1) {
return isAssignable(fieldTypes.get(0), targetTableType.keyTypeConstraint, unresolvedTypes);
}
BTupleType tupleType = new BTupleType(fieldTypes);
return isAssignable(tupleType, targetTableType.keyTypeConstraint, unresolvedTypes);
}
return targetTableType.fieldNameList.equals(sourceTableType.fieldNameList);
}
BField getTableConstraintField(BType constraintType, String fieldName) {
switch (constraintType.tag) {
case TypeTags.RECORD:
Map<String, BField> fieldList = ((BRecordType) constraintType).getFields();
return fieldList.get(fieldName);
case TypeTags.UNION:
BUnionType unionType = (BUnionType) constraintType;
Set<BType> memTypes = unionType.getMemberTypes();
List<BField> fields = memTypes.stream().map(type -> getTableConstraintField(type, fieldName))
.filter(Objects::nonNull).collect(Collectors.toList());
if (fields.size() != memTypes.size()) {
return null;
}
if (fields.stream().allMatch(field -> isAssignable(field.type, fields.get(0).type) &&
isAssignable(fields.get(0).type, field.type))) {
return fields.get(0);
}
break;
case TypeTags.INTERSECTION:
return getTableConstraintField(((BIntersectionType) constraintType).effectiveType, fieldName);
}
return null;
}
private boolean isAssignableMapType(BMapType sourceMapType, BRecordType targetRecType) {
if (targetRecType.sealed) {
return false;
}
for (BField field : targetRecType.fields.values()) {
if (!Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL)) {
return false;
}
if (hasIncompatibleReadOnlyFlags(field.symbol.flags, sourceMapType.flags)) {
return false;
}
if (!isAssignable(sourceMapType.constraint, field.type)) {
return false;
}
}
return isAssignable(sourceMapType.constraint, targetRecType.restFieldType);
}
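    // Illustrative example (assumed Ballerina source): `map<int>` is assignable to a record target only when
    // the record is open, every field is optional with compatible readonly-ness, and the map constraint is
    // assignable to each field type as well as to the rest field type.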
private boolean hasIncompatibleReadOnlyFlags(long targetFlags, long sourceFlags) {
return Symbols.isFlagOn(targetFlags, Flags.READONLY) && !Symbols.isFlagOn(sourceFlags, Flags.READONLY);
}
private boolean isErrorTypeAssignable(BErrorType source, BErrorType target, Set<TypePair> unresolvedTypes) {
if (target == symTable.errorType) {
return true;
}
TypePair pair = new TypePair(source, target);
if (unresolvedTypes.contains(pair)) {
return true;
}
unresolvedTypes.add(pair);
return isAssignable(source.detailType, target.detailType, unresolvedTypes)
&& target.typeIdSet.isAssignableFrom(source.typeIdSet);
}
private boolean isXMLTypeAssignable(BType sourceType, BType targetType, Set<TypePair> unresolvedTypes) {
int sourceTag = sourceType.tag;
int targetTag = targetType.tag;
if (targetTag == TypeTags.XML) {
BXMLType target = (BXMLType) targetType;
if (target.constraint != null) {
if (TypeTags.isXMLNonSequenceType(sourceTag)) {
return isAssignable(sourceType, target.constraint, unresolvedTypes);
}
BXMLType source = (BXMLType) sourceType;
if (source.constraint.tag == TypeTags.NEVER) {
if (sourceTag == targetTag) {
return true;
}
return isAssignable(source, target.constraint, unresolvedTypes);
}
return isAssignable(source.constraint, target.constraint, unresolvedTypes);
}
return true;
}
if (sourceTag == TypeTags.XML) {
BXMLType source = (BXMLType) sourceType;
if (targetTag == TypeTags.XML_TEXT) {
if (source.constraint != null) {
return source.constraint.tag == TypeTags.NEVER;
}
return false;
}
}
return sourceTag == targetTag;
}
private boolean isTupleTypeAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
if (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) {
return false;
}
BTupleType lhsTupleType = (BTupleType) target;
BTupleType rhsTupleType = (BTupleType) source;
if (lhsTupleType.restType == null && rhsTupleType.restType != null) {
return false;
}
if (lhsTupleType.restType == null && lhsTupleType.tupleTypes.size() != rhsTupleType.tupleTypes.size()) {
return false;
}
if (lhsTupleType.restType != null && rhsTupleType.restType != null) {
if (!isAssignable(rhsTupleType.restType, lhsTupleType.restType, unresolvedTypes)) {
return false;
}
}
if (lhsTupleType.tupleTypes.size() > rhsTupleType.tupleTypes.size()) {
return false;
}
for (int i = 0; i < rhsTupleType.tupleTypes.size(); i++) {
BType lhsType = (lhsTupleType.tupleTypes.size() > i)
? lhsTupleType.tupleTypes.get(i) : lhsTupleType.restType;
if (!isAssignable(rhsTupleType.tupleTypes.get(i), lhsType, unresolvedTypes)) {
return false;
}
}
return true;
}
private boolean checkAllTupleMembersBelongNoType(List<BType> tupleTypes) {
boolean isNoType = false;
for (BType memberType : tupleTypes) {
switch (memberType.tag) {
case TypeTags.NONE:
isNoType = true;
break;
case TypeTags.TUPLE:
isNoType = checkAllTupleMembersBelongNoType(((BTupleType) memberType).tupleTypes);
if (!isNoType) {
return false;
}
break;
default:
return false;
}
}
return isNoType;
}
private boolean isTupleTypeAssignableToArrayType(BTupleType source, BArrayType target,
Set<TypePair> unresolvedTypes) {
if (target.state != BArrayState.OPEN
&& (source.restType != null || source.tupleTypes.size() != target.size)) {
return false;
}
List<BType> sourceTypes = new ArrayList<>(source.tupleTypes);
if (source.restType != null) {
sourceTypes.add(source.restType);
}
return sourceTypes.stream()
.allMatch(tupleElemType -> isAssignable(tupleElemType, target.eType, unresolvedTypes));
}
private boolean isArrayTypeAssignableToTupleType(BArrayType source, BTupleType target,
Set<TypePair> unresolvedTypes) {
BType restType = target.restType;
List<BType> tupleTypes = target.tupleTypes;
if (source.state == BArrayState.OPEN) {
if (restType == null || !tupleTypes.isEmpty()) {
return false;
}
return isAssignable(source.eType, restType, unresolvedTypes);
}
int targetTupleMemberSize = tupleTypes.size();
int sourceArraySize = source.size;
if (targetTupleMemberSize > sourceArraySize) {
return false;
}
if (restType == null && targetTupleMemberSize < sourceArraySize) {
return false;
}
BType sourceElementType = source.eType;
for (BType memType : tupleTypes) {
if (!isAssignable(sourceElementType, memType, unresolvedTypes)) {
return false;
}
}
if (restType == null) {
return true;
}
return sourceArraySize == targetTupleMemberSize || isAssignable(sourceElementType, restType, unresolvedTypes);
}
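    // Illustrative examples (assumed Ballerina source): `int[]` is assignable to `[int...]`, and `int[3]` is
    // assignable to `[int, int, int]` or `[int, int...]`, since the array's element type must satisfy each
    // tuple member type and, where present, the tuple rest type.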
private boolean isArrayTypesAssignable(BArrayType source, BType target, Set<TypePair> unresolvedTypes) {
BType sourceElementType = source.getElementType();
if (target.tag == TypeTags.ARRAY) {
BArrayType targetArrayType = (BArrayType) target;
BType targetElementType = targetArrayType.getElementType();
if (targetArrayType.state == BArrayState.OPEN) {
return isAssignable(sourceElementType, targetElementType, unresolvedTypes);
}
if (targetArrayType.size != source.size) {
return false;
}
return isAssignable(sourceElementType, targetElementType, unresolvedTypes);
        } else if (target.tag == TypeTags.JSON || target.tag == TypeTags.ANYDATA) {
            return isAssignable(sourceElementType, target, unresolvedTypes);
        }
return false;
}
private boolean isFunctionTypeAssignable(BInvokableType source, BInvokableType target,
Set<TypePair> unresolvedTypes) {
if (hasIncompatibleIsolatedFlags(source, target) || hasIncompatibleTransactionalFlags(source, target)) {
return false;
}
if (Symbols.isFlagOn(target.flags, Flags.ANY_FUNCTION)) {
return true;
}
if (containsTypeParams(target)) {
if (source.paramTypes.size() != target.paramTypes.size()) {
return false;
}
for (int i = 0; i < source.paramTypes.size(); i++) {
BType sourceParam = source.paramTypes.get(i);
BType targetParam = target.paramTypes.get(i);
boolean isTypeParam = TypeParamAnalyzer.isTypeParam(targetParam);
if (isTypeParam) {
if (!isAssignable(sourceParam, targetParam)) {
return false;
}
} else {
if (!isAssignable(targetParam, sourceParam)) {
return false;
}
}
}
if (source.retType == null && target.retType == null) {
return true;
} else if (source.retType == null || target.retType == null) {
return false;
}
return isAssignable(source.retType, target.retType, unresolvedTypes);
}
return checkFunctionTypeEquality(source, target, unresolvedTypes, (s, t, ut) -> isAssignable(t, s, ut));
}
public boolean isInherentlyImmutableType(BType type) {
if (isValueType(type)) {
return true;
}
switch (type.tag) {
case TypeTags.XML_TEXT:
case TypeTags.FINITE:
case TypeTags.READONLY:
case TypeTags.NIL:
case TypeTags.ERROR:
case TypeTags.INVOKABLE:
case TypeTags.TYPEDESC:
case TypeTags.HANDLE:
return true;
case TypeTags.XML:
return ((BXMLType) type).constraint.tag == TypeTags.NEVER;
}
return false;
}
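    // Illustrative note: value types plus nil, error, typedesc, handle, function, readonly, finite types and
    // `xml<never>` are treated as inherently immutable here; structured types such as arrays, maps and records
    // are handled separately via selective immutability.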
boolean isSelectivelyImmutableType(BType type) {
return isSelectivelyImmutableType(type, new HashSet<>(), false);
}
boolean isSelectivelyImmutableType(BType type, boolean forceCheck) {
return isSelectivelyImmutableType(type, new HashSet<>(), forceCheck);
}
public boolean isSelectivelyImmutableType(BType type, Set<BType> unresolvedTypes) {
return isSelectivelyImmutableType(type, unresolvedTypes, false);
}
private boolean isSelectivelyImmutableType(BType type, Set<BType> unresolvedTypes, boolean forceCheck) {
return isSelectivelyImmutableType(type, false, unresolvedTypes, forceCheck);
}
private boolean isSelectivelyImmutableType(BType type, boolean disallowReadOnlyObjects, Set<BType> unresolvedTypes,
boolean forceCheck) {
if (isInherentlyImmutableType(type) || !(type instanceof SelectivelyImmutableReferenceType)) {
return false;
}
if (!unresolvedTypes.add(type)) {
return true;
}
if (!forceCheck && ((SelectivelyImmutableReferenceType) type).getImmutableType() != null) {
return true;
}
switch (type.tag) {
case TypeTags.ANY:
case TypeTags.ANYDATA:
case TypeTags.JSON:
case TypeTags.XML:
case TypeTags.XML_COMMENT:
case TypeTags.XML_ELEMENT:
case TypeTags.XML_PI:
return true;
case TypeTags.ARRAY:
BType elementType = ((BArrayType) type).eType;
return isInherentlyImmutableType(elementType) ||
isSelectivelyImmutableType(elementType, unresolvedTypes, forceCheck);
case TypeTags.TUPLE:
BTupleType tupleType = (BTupleType) type;
for (BType tupMemType : tupleType.tupleTypes) {
if (!isInherentlyImmutableType(tupMemType) &&
!isSelectivelyImmutableType(tupMemType, unresolvedTypes, forceCheck)) {
return false;
}
}
BType tupRestType = tupleType.restType;
if (tupRestType == null) {
return true;
}
return isInherentlyImmutableType(tupRestType) ||
isSelectivelyImmutableType(tupRestType, unresolvedTypes, forceCheck);
case TypeTags.RECORD:
BRecordType recordType = (BRecordType) type;
for (BField field : recordType.fields.values()) {
BType fieldType = field.type;
if (!isInherentlyImmutableType(fieldType) &&
!isSelectivelyImmutableType(fieldType, unresolvedTypes, forceCheck)) {
return false;
}
}
BType recordRestType = recordType.restFieldType;
if (recordRestType == null || recordRestType == symTable.noType) {
return true;
}
return isInherentlyImmutableType(recordRestType) ||
isSelectivelyImmutableType(recordRestType, unresolvedTypes, forceCheck);
case TypeTags.MAP:
BType constraintType = ((BMapType) type).constraint;
return isInherentlyImmutableType(constraintType) ||
isSelectivelyImmutableType(constraintType, unresolvedTypes, forceCheck);
case TypeTags.OBJECT:
BObjectType objectType = (BObjectType) type;
for (BField field : objectType.fields.values()) {
BType fieldType = field.type;
if (!isInherentlyImmutableType(fieldType) &&
!isSelectivelyImmutableType(fieldType, unresolvedTypes, forceCheck)) {
return false;
}
}
return true;
case TypeTags.TABLE:
BType tableConstraintType = ((BTableType) type).constraint;
return isInherentlyImmutableType(tableConstraintType) ||
isSelectivelyImmutableType(tableConstraintType, unresolvedTypes, forceCheck);
case TypeTags.UNION:
boolean readonlyIntersectionExists = false;
for (BType memberType : ((BUnionType) type).getMemberTypes()) {
if (isInherentlyImmutableType(memberType) ||
isSelectivelyImmutableType(memberType, unresolvedTypes, forceCheck)) {
readonlyIntersectionExists = true;
}
}
return readonlyIntersectionExists;
case TypeTags.INTERSECTION:
return isSelectivelyImmutableType(((BIntersectionType) type).effectiveType, unresolvedTypes,
forceCheck);
}
return false;
}
private boolean containsTypeParams(BInvokableType type) {
boolean hasParameterizedTypes = type.paramTypes.stream()
.anyMatch(t -> {
if (t.tag == TypeTags.FUNCTION_POINTER) {
return containsTypeParams((BInvokableType) t);
}
return TypeParamAnalyzer.isTypeParam(t);
});
        if (hasParameterizedTypes) {
            return true;
        }
if (type.retType.tag == TypeTags.FUNCTION_POINTER) {
return containsTypeParams((BInvokableType) type.retType);
}
return TypeParamAnalyzer.isTypeParam(type.retType);
}
private boolean isSameFunctionType(BInvokableType source, BInvokableType target, Set<TypePair> unresolvedTypes) {
return checkFunctionTypeEquality(source, target, unresolvedTypes, this::isSameType);
}
private boolean checkFunctionTypeEquality(BInvokableType source, BInvokableType target,
Set<TypePair> unresolvedTypes, TypeEqualityPredicate equality) {
if (hasIncompatibleIsolatedFlags(source, target) || hasIncompatibleTransactionalFlags(source, target)) {
return false;
}
if (Symbols.isFlagOn(target.flags, Flags.ANY_FUNCTION) && Symbols.isFlagOn(source.flags, Flags.ANY_FUNCTION)) {
return true;
}
if (Symbols.isFlagOn(target.flags, Flags.ANY_FUNCTION) || Symbols.isFlagOn(source.flags, Flags.ANY_FUNCTION)) {
return false;
}
if (source.paramTypes.size() != target.paramTypes.size()) {
return false;
}
for (int i = 0; i < source.paramTypes.size(); i++) {
if (!equality.test(source.paramTypes.get(i), target.paramTypes.get(i), unresolvedTypes)) {
return false;
}
}
        if ((source.restType != null && target.restType == null) ||
                (target.restType != null && source.restType == null)) {
return false;
} else if (source.restType != null && !equality.test(source.restType, target.restType, unresolvedTypes)) {
return false;
}
if (source.retType == null && target.retType == null) {
return true;
} else if (source.retType == null || target.retType == null) {
return false;
}
return isAssignable(source.retType, target.retType, unresolvedTypes);
}
private boolean hasIncompatibleIsolatedFlags(BInvokableType source, BInvokableType target) {
return Symbols.isFlagOn(target.flags, Flags.ISOLATED) && !Symbols.isFlagOn(source.flags, Flags.ISOLATED);
}
private boolean hasIncompatibleTransactionalFlags(BInvokableType source, BInvokableType target) {
return Symbols.isFlagOn(source.flags, Flags.TRANSACTIONAL) &&
!Symbols.isFlagOn(target.flags, Flags.TRANSACTIONAL);
}
public boolean isSameArrayType(BType source, BType target, Set<TypePair> unresolvedTypes) {
if (target.tag != TypeTags.ARRAY || source.tag != TypeTags.ARRAY) {
return false;
}
BArrayType lhsArrayType = (BArrayType) target;
BArrayType rhsArrayType = (BArrayType) source;
boolean hasSameTypeElements = isSameType(lhsArrayType.eType, rhsArrayType.eType, unresolvedTypes);
if (lhsArrayType.state == BArrayState.OPEN) {
return (rhsArrayType.state == BArrayState.OPEN) && hasSameTypeElements;
}
return checkSealedArraySizeEquality(rhsArrayType, lhsArrayType) && hasSameTypeElements;
}
public boolean isSameStreamType(BType source, BType target, Set<TypePair> unresolvedTypes) {
if (target.tag != TypeTags.STREAM || source.tag != TypeTags.STREAM) {
return false;
}
BStreamType lhsStreamType = (BStreamType) target;
BStreamType rhsStreamType = (BStreamType) source;
return isSameType(lhsStreamType.constraint, rhsStreamType.constraint, unresolvedTypes)
&& isSameType(lhsStreamType.error, rhsStreamType.error, unresolvedTypes);
}
public boolean checkSealedArraySizeEquality(BArrayType rhsArrayType, BArrayType lhsArrayType) {
return lhsArrayType.size == rhsArrayType.size;
}
public boolean checkStructEquivalency(BType rhsType, BType lhsType) {
return checkStructEquivalency(rhsType, lhsType, new HashSet<>());
}
private boolean checkStructEquivalency(BType rhsType, BType lhsType, Set<TypePair> unresolvedTypes) {
TypePair pair = new TypePair(rhsType, lhsType);
if (unresolvedTypes.contains(pair)) {
return true;
}
unresolvedTypes.add(pair);
if (rhsType.tag == TypeTags.OBJECT && lhsType.tag == TypeTags.OBJECT) {
return checkObjectEquivalency((BObjectType) rhsType, (BObjectType) lhsType, unresolvedTypes);
}
if (rhsType.tag == TypeTags.RECORD && lhsType.tag == TypeTags.RECORD) {
return checkRecordEquivalency((BRecordType) rhsType, (BRecordType) lhsType, unresolvedTypes);
}
return false;
}
public boolean checkObjectEquivalency(BObjectType rhsType, BObjectType lhsType, Set<TypePair> unresolvedTypes) {
if (Symbols.isFlagOn(lhsType.flags, Flags.ISOLATED) && !Symbols.isFlagOn(rhsType.flags, Flags.ISOLATED)) {
return false;
}
BObjectTypeSymbol lhsStructSymbol = (BObjectTypeSymbol) lhsType.tsymbol;
BObjectTypeSymbol rhsStructSymbol = (BObjectTypeSymbol) rhsType.tsymbol;
List<BAttachedFunction> lhsFuncs = lhsStructSymbol.attachedFuncs;
List<BAttachedFunction> rhsFuncs = ((BObjectTypeSymbol) rhsType.tsymbol).attachedFuncs;
int lhsAttachedFuncCount = getObjectFuncCount(lhsStructSymbol);
int rhsAttachedFuncCount = getObjectFuncCount(rhsStructSymbol);
boolean isLhsAService = Symbols.isService(lhsStructSymbol);
if (isLhsAService && !Symbols.isService(rhsStructSymbol)) {
return false;
}
if (lhsType.fields.size() > rhsType.fields.size() || lhsAttachedFuncCount > rhsAttachedFuncCount) {
return false;
}
for (BField bField : lhsType.fields.values()) {
if (Symbols.isPrivate(bField.symbol)) {
return false;
}
}
for (BAttachedFunction func : lhsFuncs) {
if (Symbols.isPrivate(func.symbol)) {
return false;
}
}
for (BField lhsField : lhsType.fields.values()) {
BField rhsField = rhsType.fields.get(lhsField.name.value);
if (rhsField == null ||
!isInSameVisibilityRegion(lhsField.symbol, rhsField.symbol) ||
!isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) {
return false;
}
}
for (BAttachedFunction lhsFunc : lhsFuncs) {
if (lhsFunc == lhsStructSymbol.initializerFunc) {
continue;
}
if (isLhsAService && Symbols.isResource(lhsFunc.symbol)) {
continue;
}
BAttachedFunction rhsFunc = getMatchingInvokableType(rhsFuncs, lhsFunc, unresolvedTypes);
if (rhsFunc == null || !isInSameVisibilityRegion(lhsFunc.symbol, rhsFunc.symbol)) {
return false;
}
if (Symbols.isRemote(lhsFunc.symbol) != Symbols.isRemote(rhsFunc.symbol)) {
return false;
}
}
return lhsType.typeIdSet.isAssignableFrom(rhsType.typeIdSet);
}
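    // Illustrative note (assumed Ballerina semantics): object assignability is structural, so a source object
    // may declare additional public fields and methods beyond those of the target, but any private member on
    // the target rules out equivalence.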
private int getObjectFuncCount(BObjectTypeSymbol sym) {
if (sym.initializerFunc != null && sym.attachedFuncs.contains(sym.initializerFunc)) {
return sym.attachedFuncs.size() - 1;
}
return sym.attachedFuncs.size();
}
public boolean checkRecordEquivalency(BRecordType rhsType, BRecordType lhsType, Set<TypePair> unresolvedTypes) {
if (lhsType.sealed && !rhsType.sealed) {
return false;
}
if (!rhsType.sealed && !isAssignable(rhsType.restFieldType, lhsType.restFieldType, unresolvedTypes)) {
return false;
}
return checkFieldEquivalency(lhsType, rhsType, unresolvedTypes);
}
public void setInputClauseTypedBindingPatternType(BLangInputClause bLangInputClause) {
if (bLangInputClause.collection == null) {
return;
}
BType collectionType = bLangInputClause.collection.type;
BType varType;
switch (collectionType.tag) {
case TypeTags.STRING:
varType = symTable.stringType;
break;
case TypeTags.ARRAY:
BArrayType arrayType = (BArrayType) collectionType;
varType = arrayType.eType;
break;
case TypeTags.TUPLE:
BTupleType tupleType = (BTupleType) collectionType;
LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);
if (tupleType.restType != null) {
tupleTypes.add(tupleType.restType);
}
varType = tupleTypes.size() == 1 ?
tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);
break;
case TypeTags.MAP:
BMapType bMapType = (BMapType) collectionType;
varType = bMapType.constraint;
break;
case TypeTags.RECORD:
BRecordType recordType = (BRecordType) collectionType;
varType = inferRecordFieldType(recordType);
break;
case TypeTags.XML:
BXMLType xmlType = (BXMLType) collectionType;
varType = xmlType.constraint;
break;
case TypeTags.XML_TEXT:
varType = symTable.xmlTextType;
break;
case TypeTags.TABLE:
BTableType tableType = (BTableType) collectionType;
varType = tableType.constraint;
break;
case TypeTags.STREAM:
BStreamType streamType = (BStreamType) collectionType;
if (streamType.constraint.tag == TypeTags.NONE) {
varType = symTable.anydataType;
break;
}
varType = streamType.constraint;
break;
case TypeTags.OBJECT:
if (!isAssignable(bLangInputClause.collection.type, symTable.iterableType)) {
dlog.error(bLangInputClause.collection.pos, DiagnosticErrorCode.INVALID_ITERABLE_OBJECT_TYPE,
bLangInputClause.collection.type, symTable.iterableType);
bLangInputClause.varType = symTable.semanticError;
bLangInputClause.resultType = symTable.semanticError;
bLangInputClause.nillableResultType = symTable.semanticError;
return;
}
BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);
if (nextMethodReturnType != null) {
bLangInputClause.resultType = getRecordType(nextMethodReturnType);
bLangInputClause.nillableResultType = nextMethodReturnType;
bLangInputClause.varType = ((BRecordType) bLangInputClause.resultType).fields.get("value").type;
return;
}
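                // Intentional fall-through: when the iterator's next() return type cannot be determined,
                // the clause is treated as erroneous.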
case TypeTags.SEMANTIC_ERROR:
bLangInputClause.varType = symTable.semanticError;
bLangInputClause.resultType = symTable.semanticError;
bLangInputClause.nillableResultType = symTable.semanticError;
return;
default:
bLangInputClause.varType = symTable.semanticError;
bLangInputClause.resultType = symTable.semanticError;
bLangInputClause.nillableResultType = symTable.semanticError;
dlog.error(bLangInputClause.collection.pos, DiagnosticErrorCode.ITERABLE_NOT_SUPPORTED_COLLECTION,
collectionType);
return;
}
BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,
names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
BUnionType nextMethodReturnType =
(BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);
bLangInputClause.varType = varType;
bLangInputClause.resultType = getRecordType(nextMethodReturnType);
bLangInputClause.nillableResultType = nextMethodReturnType;
}
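    // Illustrative examples (assumed Ballerina source): in `from var x in [1, 2, 3]` the bound variable type
    // is `int` (the array element type), in `from var c in "abc"` it is `string`, and for a `map<T>` or
    // `table<T>` collection it is the constraint type `T`.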
public BUnionType getVarTypeFromIterableObject(BObjectType collectionType) {
BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) collectionType.tsymbol;
for (BAttachedFunction func : objectTypeSymbol.attachedFuncs) {
if (func.funcName.value.equals(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC)) {
return getVarTypeFromIteratorFunc(func);
}
}
return null;
}
private BUnionType getVarTypeFromIteratorFunc(BAttachedFunction candidateIteratorFunc) {
if (!candidateIteratorFunc.type.paramTypes.isEmpty()) {
return null;
}
BType returnType = candidateIteratorFunc.type.retType;
return getVarTypeFromIteratorFuncReturnType(returnType);
}
public BUnionType getVarTypeFromIteratorFuncReturnType(BType returnType) {
BObjectTypeSymbol objectTypeSymbol;
if (returnType.tag != TypeTags.OBJECT) {
return null;
}
objectTypeSymbol = (BObjectTypeSymbol) returnType.tsymbol;
for (BAttachedFunction func : objectTypeSymbol.attachedFuncs) {
if (func.funcName.value.equals(BLangCompilerConstants.NEXT_FUNC)) {
return getVarTypeFromNextFunc(func);
}
}
return null;
}
private BUnionType getVarTypeFromNextFunc(BAttachedFunction nextFunc) {
BType returnType;
if (!nextFunc.type.paramTypes.isEmpty()) {
return null;
}
returnType = nextFunc.type.retType;
if (checkNextFuncReturnType(returnType)) {
return (BUnionType) returnType;
}
return null;
}
private boolean checkNextFuncReturnType(BType returnType) {
if (returnType.tag != TypeTags.UNION) {
return false;
}
List<BType> types = getAllTypes(returnType);
boolean containsCompletionType = types.removeIf(type -> type.tag == TypeTags.NIL);
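        // removeIf is kept on the left of the || below so that error members are always removed,
        // even when a nil member has already been found.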
containsCompletionType = types.removeIf(type -> type.tag == TypeTags.ERROR) || containsCompletionType;
if (!containsCompletionType) {
return false;
}
if (types.size() != 1) {
return false;
}
if (types.get(0).tag != TypeTags.RECORD) {
return false;
}
BRecordType recordType = (BRecordType) types.get(0);
return checkRecordTypeInNextFuncReturnType(recordType);
}
private boolean checkRecordTypeInNextFuncReturnType(BRecordType recordType) {
if (!recordType.sealed) {
return false;
}
if (recordType.fields.size() != 1) {
return false;
}
return recordType.fields.containsKey(BLangCompilerConstants.VALUE_FIELD);
}
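    // Illustrative note: the accepted next() return type is assumed to follow the usual Ballerina iterator
    // shape, e.g. `record {| T value; |}|error?`, that is, a union with nil and/or error as the completion
    // type plus a single closed record whose only field is `value`.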
private BRecordType getRecordType(BUnionType type) {
for (BType member : type.getMemberTypes()) {
if (member.tag == TypeTags.RECORD) {
return (BRecordType) member;
}
}
return null;
}
public BErrorType getErrorType(BUnionType type) {
for (BType member : type.getMemberTypes()) {
if (member.tag == TypeTags.ERROR) {
return (BErrorType) member;
} else if (member.tag == TypeTags.UNION) {
BErrorType e = getErrorType((BUnionType) member);
if (e != null) {
return e;
}
}
}
return null;
}
public BType getResultTypeOfNextInvocation(BObjectType iteratorType) {
BAttachedFunction nextFunc = getAttachedFuncFromObject(iteratorType, BLangCompilerConstants.NEXT_FUNC);
return Objects.requireNonNull(nextFunc).type.retType;
}
public BAttachedFunction getAttachedFuncFromObject(BObjectType objectType, String funcName) {
BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) objectType.tsymbol;
for (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) {
if (funcName.equals(bAttachedFunction.funcName.value)) {
return bAttachedFunction;
}
}
return null;
}
public BType inferRecordFieldType(BRecordType recordType) {
Map<String, BField> fields = recordType.fields;
BUnionType unionType = BUnionType.create(null);
if (!recordType.sealed) {
unionType.add(recordType.restFieldType);
        } else if (fields.isEmpty()) {
unionType.add(symTable.neverType);
}
for (BField field : fields.values()) {
if (isAssignable(field.type, unionType)) {
continue;
}
if (isAssignable(unionType, field.type)) {
unionType = BUnionType.create(null);
}
unionType.add(field.type);
}
if (unionType.getMemberTypes().size() > 1) {
unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)),
Names.EMPTY, recordType.tsymbol.pkgID, null,
recordType.tsymbol.owner, symTable.builtinPos, VIRTUAL);
return unionType;
}
return unionType.getMemberTypes().iterator().next();
}
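    // Illustrative example (assumed Ballerina source): for `record {| int a; string b; |}` the inferred field
    // type is `int|string`; for a closed record with no fields it is `never`, and for an open record the rest
    // field type is included as well.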
/**
* Enum to represent type test result.
*
* @since 1.2.0
*/
enum TypeTestResult {
NOT_FOUND,
TRUE,
FALSE
}
TypeTestResult isBuiltInTypeWidenPossible(BType actualType, BType targetType) {
int targetTag = targetType.tag;
int actualTag = actualType.tag;
if (actualTag < TypeTags.JSON && targetTag < TypeTags.JSON) {
switch (actualTag) {
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
if (targetTag == TypeTags.BOOLEAN || targetTag == TypeTags.STRING) {
return TypeTestResult.FALSE;
}
break;
case TypeTags.BOOLEAN:
if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT
|| targetTag == TypeTags.DECIMAL || targetTag == TypeTags.STRING) {
return TypeTestResult.FALSE;
}
break;
case TypeTags.STRING:
if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT
|| targetTag == TypeTags.DECIMAL || targetTag == TypeTags.BOOLEAN) {
return TypeTestResult.FALSE;
}
break;
}
}
switch (actualTag) {
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.BOOLEAN:
case TypeTags.STRING:
case TypeTags.SIGNED32_INT:
case TypeTags.SIGNED16_INT:
case TypeTags.SIGNED8_INT:
case TypeTags.UNSIGNED32_INT:
case TypeTags.UNSIGNED16_INT:
case TypeTags.UNSIGNED8_INT:
case TypeTags.CHAR_STRING:
if (targetTag == TypeTags.JSON || targetTag == TypeTags.ANYDATA || targetTag == TypeTags.ANY ||
targetTag == TypeTags.READONLY) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.ANYDATA:
case TypeTags.TYPEDESC:
if (targetTag == TypeTags.ANY) {
return TypeTestResult.TRUE;
}
break;
            default:
                break;
        }
if (TypeTags.isIntegerTypeTag(targetTag) && actualTag == targetTag) {
return TypeTestResult.FALSE;
}
if ((TypeTags.isIntegerTypeTag(actualTag) || actualTag == TypeTags.BYTE)
&& (TypeTags.isIntegerTypeTag(targetTag) || targetTag == TypeTags.BYTE)) {
return checkBuiltInIntSubtypeWidenPossible(actualType, targetType);
}
if (actualTag == TypeTags.CHAR_STRING && TypeTags.STRING == targetTag) {
return TypeTestResult.TRUE;
}
return TypeTestResult.NOT_FOUND;
}
private TypeTestResult checkBuiltInIntSubtypeWidenPossible(BType actualType, BType targetType) {
int actualTag = actualType.tag;
switch (targetType.tag) {
case TypeTags.INT:
if (actualTag == TypeTags.BYTE || TypeTags.isIntegerTypeTag(actualTag)) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.SIGNED32_INT:
if (actualTag == TypeTags.SIGNED16_INT || actualTag == TypeTags.SIGNED8_INT ||
actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||
actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.SIGNED16_INT:
if (actualTag == TypeTags.SIGNED8_INT || actualTag == TypeTags.UNSIGNED8_INT ||
actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.UNSIGNED32_INT:
if (actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||
actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.UNSIGNED16_INT:
if (actualTag == TypeTags.UNSIGNED8_INT || actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.BYTE:
if (actualTag == TypeTags.UNSIGNED8_INT) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.UNSIGNED8_INT:
if (actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
}
return TypeTestResult.NOT_FOUND;
}
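    // Illustrative examples: widening from `int:Signed8` to `int:Signed32` or from `byte` to `int` yields
    // TRUE, while `int` to `int:Signed8` (a narrowing) falls through and yields NOT_FOUND.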
public boolean isImplicityCastable(BType actualType, BType targetType) {
        /* The word Builtin refers to compiler-known types. */
BType newTargetType = targetType;
if ((targetType.tag == TypeTags.UNION || targetType.tag == TypeTags.FINITE) && isValueType(actualType)) {
newTargetType = symTable.anyType;
} else if (targetType.tag == TypeTags.INTERSECTION) {
newTargetType = ((BIntersectionType) targetType).effectiveType;
}
TypeTestResult result = isBuiltInTypeWidenPossible(actualType, newTargetType);
if (result != TypeTestResult.NOT_FOUND) {
return result == TypeTestResult.TRUE;
}
if (isValueType(targetType) &&
(actualType.tag == TypeTags.FINITE ||
(actualType.tag == TypeTags.UNION && ((BUnionType) actualType).getMemberTypes().stream()
.anyMatch(type -> type.tag == TypeTags.FINITE && isAssignable(type, targetType))))) {
return targetType.tag == TypeTags.INT || targetType.tag == TypeTags.BYTE || targetType.tag == TypeTags.FLOAT
|| targetType.tag == TypeTags.STRING || targetType.tag == TypeTags.BOOLEAN;
} else if (targetType.tag == TypeTags.ERROR
&& (actualType.tag == TypeTags.UNION
&& isAllErrorMembers((BUnionType) actualType))) {
return true;
}
return false;
}
public boolean isTypeCastable(BLangExpression expr, BType sourceType, BType targetType, SymbolEnv env) {
if (sourceType.tag == TypeTags.SEMANTIC_ERROR || targetType.tag == TypeTags.SEMANTIC_ERROR ||
sourceType == targetType) {
return true;
}
IntersectionContext intersectionContext = IntersectionContext.compilerInternalIntersectionTestContext();
BType errorIntersection = getTypeIntersection(intersectionContext, sourceType, symTable.errorType, env);
if (errorIntersection != symTable.semanticError &&
getTypeIntersection(intersectionContext, symTable.errorType, targetType, env)
== symTable.semanticError) {
return false;
}
if (isAssignable(sourceType, targetType) || isAssignable(targetType, sourceType)) {
return true;
}
if (isNumericConversionPossible(expr, sourceType, targetType)) {
return true;
}
if (sourceType.tag == TypeTags.ANY && targetType.tag == TypeTags.READONLY) {
return true;
}
boolean validTypeCast = false;
if (sourceType instanceof BUnionType) {
if (getTypeForUnionTypeMembersAssignableToType((BUnionType) sourceType, targetType, env,
intersectionContext)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (targetType instanceof BUnionType) {
if (getTypeForUnionTypeMembersAssignableToType((BUnionType) targetType, sourceType, env,
intersectionContext)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (sourceType.tag == TypeTags.FINITE) {
if (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) sourceType, targetType)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (targetType.tag == TypeTags.FINITE) {
if (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) targetType, sourceType)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (validTypeCast) {
if (isValueType(sourceType)) {
setImplicitCastExpr(expr, sourceType, symTable.anyType);
}
return true;
}
return false;
}
boolean isNumericConversionPossible(BLangExpression expr, BType sourceType,
BType targetType) {
final boolean isSourceNumericType = isBasicNumericType(sourceType);
final boolean isTargetNumericType = isBasicNumericType(targetType);
if (isSourceNumericType && isTargetNumericType) {
return true;
}
if (targetType.tag == TypeTags.UNION) {
HashSet<Integer> typeTags = new HashSet<>();
for (BType bType : ((BUnionType) targetType).getMemberTypes()) {
if (isBasicNumericType(bType)) {
typeTags.add(bType.tag);
if (typeTags.size() > 1) {
return false;
}
}
}
}
if (!isTargetNumericType && targetType.tag != TypeTags.UNION) {
return false;
}
if (isSourceNumericType) {
setImplicitCastExpr(expr, sourceType, symTable.anyType);
return true;
}
switch (sourceType.tag) {
case TypeTags.ANY:
case TypeTags.ANYDATA:
case TypeTags.JSON:
return true;
case TypeTags.UNION:
for (BType memType : ((BUnionType) sourceType).getMemberTypes()) {
if (isBasicNumericType(memType) ||
(memType.tag == TypeTags.FINITE &&
finiteTypeContainsNumericTypeValues((BFiniteType) memType))) {
return true;
}
}
break;
case TypeTags.FINITE:
if (finiteTypeContainsNumericTypeValues((BFiniteType) sourceType)) {
return true;
}
break;
}
return false;
}
private boolean isAllErrorMembers(BUnionType actualType) {
return actualType.getMemberTypes().stream().allMatch(t -> isAssignable(t, symTable.errorType));
}
public void setImplicitCastExpr(BLangExpression expr, BType actualType, BType expType) {
if (!isImplicityCastable(actualType, expType)) {
return;
}
BLangTypeConversionExpr implicitConversionExpr =
(BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
implicitConversionExpr.pos = expr.pos;
implicitConversionExpr.expr = expr.impConversionExpr == null ? expr : expr.impConversionExpr;
implicitConversionExpr.type = expType;
implicitConversionExpr.targetType = expType;
implicitConversionExpr.internal = true;
expr.impConversionExpr = implicitConversionExpr;
}
public BType getElementType(BType type) {
if (type.tag != TypeTags.ARRAY) {
return type;
}
return getElementType(((BArrayType) type).getElementType());
}
public boolean checkListenerCompatibilityAtServiceDecl(BType type) {
if (type.tag == TypeTags.UNION) {
int listenerCompatibleTypeCount = 0;
for (BType memberType : ((BUnionType) type).getMemberTypes()) {
if (memberType.tag != TypeTags.ERROR) {
if (!checkListenerCompatibility(memberType)) {
return false;
}
listenerCompatibleTypeCount++;
}
}
return listenerCompatibleTypeCount > 0;
}
return checkListenerCompatibility(type);
}
public boolean checkListenerCompatibility(BType type) {
if (type.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) type;
for (BType memberType : unionType.getMemberTypes()) {
if (!checkListenerCompatibility(memberType)) {
return false;
}
}
return true;
}
if (type.tag != TypeTags.OBJECT) {
return false;
}
BObjectType rhsType = (BObjectType) type;
List<BAttachedFunction> rhsFuncs = ((BStructureTypeSymbol) rhsType.tsymbol).attachedFuncs;
ListenerValidationModel listenerValidationModel = new ListenerValidationModel(this, symTable);
return listenerValidationModel.checkMethods(rhsFuncs);
}
public boolean isValidErrorDetailType(BType detailType) {
switch (detailType.tag) {
case TypeTags.MAP:
case TypeTags.RECORD:
return isAssignable(detailType, symTable.detailType);
}
return false;
}
private boolean isSealedRecord(BType recordType) {
return recordType.getKind() == TypeKind.RECORD && ((BRecordType) recordType).sealed;
}
private boolean isNullable(BType fieldType) {
return fieldType.isNullable();
}
private class BSameTypeVisitor implements BTypeVisitor<BType, Boolean> {
Set<TypePair> unresolvedTypes;
BSameTypeVisitor(Set<TypePair> unresolvedTypes) {
this.unresolvedTypes = unresolvedTypes;
}
@Override
public Boolean visit(BType t, BType s) {
if (t == s) {
return true;
}
switch (t.tag) {
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.STRING:
case TypeTags.BOOLEAN:
return t.tag == s.tag
&& (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));
case TypeTags.ANY:
case TypeTags.ANYDATA:
return t.tag == s.tag && hasSameReadonlyFlag(s, t)
&& (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));
default:
break;
}
return false;
}
@Override
public Boolean visit(BBuiltInRefType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BAnyType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BAnydataType t, BType s) {
if (t == s) {
return true;
}
return t.tag == s.tag;
}
@Override
public Boolean visit(BMapType t, BType s) {
if (s.tag != TypeTags.MAP || !hasSameReadonlyFlag(s, t)) {
return false;
}
BMapType sType = ((BMapType) s);
return isSameType(sType.constraint, t.constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BFutureType t, BType s) {
return s.tag == TypeTags.FUTURE &&
isSameType(t.constraint, ((BFutureType) s).constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BXMLType t, BType s) {
return visit((BBuiltInRefType) t, s);
}
@Override
public Boolean visit(BJSONType t, BType s) {
return s.tag == TypeTags.JSON && hasSameReadonlyFlag(s, t);
}
@Override
public Boolean visit(BArrayType t, BType s) {
return s.tag == TypeTags.ARRAY && hasSameReadonlyFlag(s, t) && isSameArrayType(s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BObjectType t, BType s) {
if (t == s) {
return true;
}
if (s.tag != TypeTags.OBJECT) {
return false;
}
return t.tsymbol.pkgID.equals(s.tsymbol.pkgID) && t.tsymbol.name.equals(s.tsymbol.name);
}
@Override
public Boolean visit(BRecordType t, BType s) {
if (t == s) {
return true;
}
if (s.tag != TypeTags.RECORD || !hasSameReadonlyFlag(s, t)) {
return false;
}
BRecordType source = (BRecordType) s;
if (source.fields.size() != t.fields.size()) {
return false;
}
for (BField sourceField : source.fields.values()) {
if (t.fields.containsKey(sourceField.name.value)) {
BField targetField = t.fields.get(sourceField.name.value);
if (isSameType(sourceField.type, targetField.type, this.unresolvedTypes) &&
hasSameOptionalFlag(sourceField.symbol, targetField.symbol) &&
(!Symbols.isFlagOn(targetField.symbol.flags, Flags.READONLY) ||
Symbols.isFlagOn(sourceField.symbol.flags, Flags.READONLY))) {
continue;
}
}
return false;
}
return isSameType(source.restFieldType, t.restFieldType, this.unresolvedTypes);
}
private boolean hasSameOptionalFlag(BVarSymbol s, BVarSymbol t) {
return ((s.flags & Flags.OPTIONAL) ^ (t.flags & Flags.OPTIONAL)) != Flags.OPTIONAL;
}
private boolean hasSameReadonlyFlag(BType source, BType target) {
return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY);
}
        @Override
        public Boolean visit(BTupleType t, BType s) {
if (((!t.tupleTypes.isEmpty() && checkAllTupleMembersBelongNoType(t.tupleTypes)) ||
(t.restType != null && t.restType.tag == TypeTags.NONE)) &&
!(s.tag == TypeTags.ARRAY && ((BArrayType) s).state == BArrayState.OPEN)) {
return true;
}
if (s.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(s, t)) {
return false;
}
BTupleType source = (BTupleType) s;
if (source.tupleTypes.size() != t.tupleTypes.size()) {
return false;
}
BType sourceRestType = source.restType;
BType targetRestType = t.restType;
if ((sourceRestType == null || targetRestType == null) && sourceRestType != targetRestType) {
return false;
}
for (int i = 0; i < source.tupleTypes.size(); i++) {
if (t.getTupleTypes().get(i) == symTable.noType) {
continue;
}
if (!isSameType(source.getTupleTypes().get(i), t.tupleTypes.get(i), this.unresolvedTypes)) {
return false;
}
}
if (sourceRestType == null || targetRestType == symTable.noType) {
return true;
}
return isSameType(sourceRestType, targetRestType, this.unresolvedTypes);
}
@Override
public Boolean visit(BStreamType t, BType s) {
return s.tag == TypeTags.STREAM && isSameStreamType(s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BTableType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BInvokableType t, BType s) {
return s.tag == TypeTags.INVOKABLE && isSameFunctionType((BInvokableType) s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BUnionType tUnionType, BType s) {
if (s.tag != TypeTags.UNION || !hasSameReadonlyFlag(s, tUnionType)) {
if (inOrderedType) {
inOrderedType = false;
return isSimpleBasicType(s.tag) && checkUnionHasSameFiniteType(tUnionType.getMemberTypes(), s);
}
return false;
}
BUnionType sUnionType = (BUnionType) s;
if (sUnionType.getMemberTypes().size()
!= tUnionType.getMemberTypes().size()) {
return false;
}
Set<BType> sourceTypes = new LinkedHashSet<>(sUnionType.getMemberTypes().size());
Set<BType> targetTypes = new LinkedHashSet<>(tUnionType.getMemberTypes().size());
sourceTypes.add(sUnionType);
sourceTypes.addAll(sUnionType.getMemberTypes());
targetTypes.add(tUnionType);
targetTypes.addAll(tUnionType.getMemberTypes());
            // Every member of the source union must have a matching member in the target union.
            return sourceTypes.stream()
                    .allMatch(sT -> targetTypes.stream()
                            .anyMatch(it -> isSameType(it, sT, this.unresolvedTypes)));
}
@Override
public Boolean visit(BIntersectionType tIntersectionType, BType s) {
if (s.tag != TypeTags.INTERSECTION || !hasSameReadonlyFlag(s, tIntersectionType)) {
return false;
}
BIntersectionType sIntersectionType = (BIntersectionType) s;
if (sIntersectionType.getConstituentTypes().size() != tIntersectionType.getConstituentTypes().size()) {
return false;
}
Set<BType> sourceTypes = new LinkedHashSet<>(sIntersectionType.getConstituentTypes());
Set<BType> targetTypes = new LinkedHashSet<>(tIntersectionType.getConstituentTypes());
for (BType sourceType : sourceTypes) {
boolean foundSameType = false;
for (BType targetType : targetTypes) {
if (isSameType(sourceType, targetType, this.unresolvedTypes)) {
foundSameType = true;
break;
}
}
if (!foundSameType) {
return false;
}
}
return true;
}
@Override
public Boolean visit(BErrorType t, BType s) {
if (s.tag != TypeTags.ERROR) {
return false;
}
BErrorType source = (BErrorType) s;
if (!source.typeIdSet.equals(t.typeIdSet)) {
return false;
}
if (source.detailType == t.detailType) {
return true;
}
return isSameType(source.detailType, t.detailType, this.unresolvedTypes);
}
@Override
public Boolean visit(BTypedescType t, BType s) {
if (s.tag != TypeTags.TYPEDESC) {
return false;
}
BTypedescType sType = ((BTypedescType) s);
return isSameType(sType.constraint, t.constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BFiniteType t, BType s) {
if (inOrderedType) {
inOrderedType = false;
return checkValueSpaceHasSameType(t, s);
}
return s == t;
}
@Override
public Boolean visit(BParameterizedType t, BType s) {
if (s.tag != TypeTags.PARAMETERIZED_TYPE) {
return false;
}
BParameterizedType sType = (BParameterizedType) s;
return isSameType(sType.paramValueType, t.paramValueType) && sType.paramSymbol.equals(t.paramSymbol);
}
    }
private boolean checkUnionHasSameFiniteType(LinkedHashSet<BType> memberTypes, BType baseType) {
for (BType type : memberTypes) {
if (type.tag != TypeTags.FINITE) {
return false;
}
boolean isValueSpaceSameType = false;
for (BLangExpression expr : ((BFiniteType) type).getValueSpace()) {
isValueSpaceSameType = isSameType(expr.type, baseType);
if (!isValueSpaceSameType) {
break;
}
}
return isValueSpaceSameType;
}
return false;
}
private boolean checkValueSpaceHasSameType(BFiniteType finiteType, BType baseType) {
if (baseType.tag == TypeTags.FINITE) {
return finiteType == baseType;
}
boolean isValueSpaceSameType = false;
for (BLangExpression expr : finiteType.getValueSpace()) {
isValueSpaceSameType = isSameType(expr.type, baseType);
if (!isValueSpaceSameType) {
break;
}
}
return isValueSpaceSameType;
}
private boolean checkFieldEquivalency(BRecordType lhsType, BRecordType rhsType, Set<TypePair> unresolvedTypes) {
Map<String, BField> rhsFields = new LinkedHashMap<>(rhsType.fields);
for (BField lhsField : lhsType.fields.values()) {
BField rhsField = rhsFields.get(lhsField.name.value);
if (rhsField == null) {
if (!Symbols.isOptional(lhsField.symbol)) {
return false;
}
continue;
}
if (hasIncompatibleReadOnlyFlags(lhsField.symbol.flags, rhsField.symbol.flags)) {
return false;
}
if (!Symbols.isOptional(lhsField.symbol) && Symbols.isOptional(rhsField.symbol)) {
return false;
}
if (!isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) {
return false;
}
rhsFields.remove(lhsField.name.value);
}
return rhsFields.entrySet().stream().allMatch(
fieldEntry -> isAssignable(fieldEntry.getValue().type, lhsType.restFieldType, unresolvedTypes));
}
private BAttachedFunction getMatchingInvokableType(List<BAttachedFunction> rhsFuncList, BAttachedFunction lhsFunc,
Set<TypePair> unresolvedTypes) {
return rhsFuncList.stream()
.filter(rhsFunc -> lhsFunc.funcName.equals(rhsFunc.funcName))
.filter(rhsFunc -> isFunctionTypeAssignable(rhsFunc.type, lhsFunc.type, unresolvedTypes))
.findFirst()
.orElse(null);
}
private boolean isInSameVisibilityRegion(BSymbol lhsSym, BSymbol rhsSym) {
if (Symbols.isPrivate(lhsSym)) {
return Symbols.isPrivate(rhsSym) && lhsSym.pkgID.equals(rhsSym.pkgID)
&& lhsSym.owner.name.equals(rhsSym.owner.name);
} else if (Symbols.isPublic(lhsSym)) {
return Symbols.isPublic(rhsSym);
}
return !Symbols.isPrivate(rhsSym) && !Symbols.isPublic(rhsSym) && lhsSym.pkgID.equals(rhsSym.pkgID);
}
private boolean isAssignableToUnionType(BType source, BType target, Set<TypePair> unresolvedTypes) {
TypePair pair = new TypePair(source, target);
if (unresolvedTypes.contains(pair)) {
return true;
}
if (source.tag == TypeTags.UNION && ((BUnionType) source).isCyclic) {
unresolvedTypes.add(pair);
}
Set<BType> sourceTypes = new LinkedHashSet<>();
Set<BType> targetTypes = new LinkedHashSet<>();
if (source.tag == TypeTags.UNION || source.tag == TypeTags.JSON || source.tag == TypeTags.ANYDATA) {
sourceTypes.addAll(getEffectiveMemberTypes((BUnionType) source));
} else {
sourceTypes.add(source);
}
boolean targetIsAUnion = false;
if (target.tag == TypeTags.UNION) {
targetIsAUnion = true;
targetTypes.addAll(getEffectiveMemberTypes((BUnionType) target));
} else {
targetTypes.add(target);
}
var sourceIterator = sourceTypes.iterator();
while (sourceIterator.hasNext()) {
BType sMember = sourceIterator.next();
if (sMember.tag == TypeTags.NEVER) {
sourceIterator.remove();
continue;
}
if (sMember.tag == TypeTags.FINITE && isAssignable(sMember, target, unresolvedTypes)) {
sourceIterator.remove();
continue;
}
if (sMember.tag == TypeTags.XML &&
isAssignableToUnionType(expandedXMLBuiltinSubtypes, target, unresolvedTypes)) {
sourceIterator.remove();
continue;
}
if (!isValueType(sMember)) {
if (!targetIsAUnion) {
continue;
}
BUnionType targetUnion = (BUnionType) target;
if (sMember instanceof BUnionType) {
BUnionType sUnion = (BUnionType) sMember;
if (sUnion.isCyclic && targetUnion.isCyclic) {
unresolvedTypes.add(new TypePair(sUnion, targetUnion));
if (isAssignable(sUnion, targetUnion, unresolvedTypes)) {
sourceIterator.remove();
continue;
}
}
}
if (sMember.tag == TypeTags.READONLY) {
unresolvedTypes.add(new TypePair(sMember, targetUnion));
if (isAssignable(sMember, targetUnion, unresolvedTypes)) {
sourceIterator.remove();
continue;
}
}
continue;
}
boolean sourceTypeIsNotAssignableToAnyTargetType = true;
var targetIterator = targetTypes.iterator();
while (targetIterator.hasNext()) {
BType t = targetIterator.next();
if (isAssignable(sMember, t, unresolvedTypes)) {
sourceIterator.remove();
sourceTypeIsNotAssignableToAnyTargetType = false;
break;
}
}
if (sourceTypeIsNotAssignableToAnyTargetType) {
return false;
}
}
sourceIterator = sourceTypes.iterator();
while (sourceIterator.hasNext()) {
BType sourceMember = sourceIterator.next();
boolean sourceTypeIsNotAssignableToAnyTargetType = true;
var targetIterator = targetTypes.iterator();
boolean selfReferencedSource = (sourceMember != source) &&
isSelfReferencedStructuredType(source, sourceMember);
while (targetIterator.hasNext()) {
BType targetMember = targetIterator.next();
boolean selfReferencedTarget = isSelfReferencedStructuredType(target, targetMember);
if (selfReferencedTarget && selfReferencedSource && (sourceMember.tag == targetMember.tag)) {
sourceTypeIsNotAssignableToAnyTargetType = false;
break;
}
if (isAssignable(sourceMember, targetMember, unresolvedTypes)) {
sourceTypeIsNotAssignableToAnyTargetType = false;
break;
}
}
if (sourceTypeIsNotAssignableToAnyTargetType) {
return false;
}
}
unresolvedTypes.add(pair);
return true;
}
public boolean isSelfReferencedStructuredType(BType source, BType s) {
if (source == s) {
return true;
}
if (s.tag == TypeTags.ARRAY) {
return isSelfReferencedStructuredType(source, ((BArrayType) s).eType);
}
if (s.tag == TypeTags.MAP) {
return isSelfReferencedStructuredType(source, ((BMapType) s).constraint);
}
if (s.tag == TypeTags.TABLE) {
return isSelfReferencedStructuredType(source, ((BTableType) s).constraint);
}
return false;
}
public BType updateSelfReferencedWithNewType(BType source, BType s, BType target) {
if (s.tag == TypeTags.ARRAY) {
BArrayType arrayType = (BArrayType) s;
if (arrayType.eType == source) {
return new BArrayType(target, arrayType.tsymbol, arrayType.size,
arrayType.state, arrayType.flags);
}
}
if (s.tag == TypeTags.MAP) {
BMapType mapType = (BMapType) s;
if (mapType.constraint == source) {
return new BMapType(mapType.tag, target, mapType.tsymbol, mapType.flags);
}
}
if (s.tag == TypeTags.TABLE) {
BTableType tableType = (BTableType) s;
if (tableType.constraint == source) {
return new BTableType(tableType.tag, target, tableType.tsymbol,
tableType.flags);
} else if (tableType.constraint instanceof BMapType) {
return updateSelfReferencedWithNewType(source, (BMapType) tableType.constraint, target);
}
}
return s;
}
public static void fixSelfReferencingSameUnion(BType originalMemberType, BUnionType origUnionType,
BType immutableMemberType, BUnionType newImmutableUnion,
LinkedHashSet<BType> readOnlyMemTypes) {
boolean sameMember = originalMemberType == immutableMemberType;
if (originalMemberType.tag == TypeTags.ARRAY) {
var arrayType = (BArrayType) originalMemberType;
if (origUnionType == arrayType.eType) {
if (sameMember) {
BArrayType newArrayType = new BArrayType(newImmutableUnion, arrayType.tsymbol, arrayType.size,
arrayType.state, arrayType.flags);
readOnlyMemTypes.add(newArrayType);
} else {
((BArrayType) immutableMemberType).eType = newImmutableUnion;
readOnlyMemTypes.add(immutableMemberType);
}
}
} else if (originalMemberType.tag == TypeTags.MAP) {
var mapType = (BMapType) originalMemberType;
if (origUnionType == mapType.constraint) {
if (sameMember) {
BMapType newMapType = new BMapType(mapType.tag, newImmutableUnion, mapType.tsymbol, mapType.flags);
readOnlyMemTypes.add(newMapType);
} else {
((BMapType) immutableMemberType).constraint = newImmutableUnion;
readOnlyMemTypes.add(immutableMemberType);
}
}
} else if (originalMemberType.tag == TypeTags.TABLE) {
var tableType = (BTableType) originalMemberType;
if (origUnionType == tableType.constraint) {
if (sameMember) {
BTableType newTableType = new BTableType(tableType.tag, newImmutableUnion, tableType.tsymbol,
tableType.flags);
readOnlyMemTypes.add(newTableType);
} else {
((BTableType) immutableMemberType).constraint = newImmutableUnion;
readOnlyMemTypes.add(immutableMemberType);
}
return;
}
var immutableConstraint = ((BTableType) immutableMemberType).constraint;
if (tableType.constraint.tag == TypeTags.MAP) {
sameMember = tableType.constraint == immutableConstraint;
var mapType = (BMapType) tableType.constraint;
if (origUnionType == mapType.constraint) {
if (sameMember) {
BMapType newMapType = new BMapType(mapType.tag, newImmutableUnion, mapType.tsymbol,
mapType.flags);
((BTableType) immutableMemberType).constraint = newMapType;
} else {
((BTableType) immutableMemberType).constraint = newImmutableUnion;
}
readOnlyMemTypes.add(immutableMemberType);
}
}
} else {
readOnlyMemTypes.add(immutableMemberType);
}
}
private Set<BType> getEffectiveMemberTypes(BUnionType unionType) {
Set<BType> memTypes = new LinkedHashSet<>();
for (BType memberType : unionType.getMemberTypes()) {
switch (memberType.tag) {
case TypeTags.INTERSECTION:
BType effectiveType = ((BIntersectionType) memberType).effectiveType;
if (effectiveType.tag == TypeTags.UNION) {
memTypes.addAll(getEffectiveMemberTypes((BUnionType) effectiveType));
continue;
}
memTypes.add(effectiveType);
break;
case TypeTags.UNION:
memTypes.addAll(getEffectiveMemberTypes((BUnionType) memberType));
break;
default:
memTypes.add(memberType);
break;
}
}
return memTypes;
}
private boolean isFiniteTypeAssignable(BFiniteType finiteType, BType targetType, Set<TypePair> unresolvedTypes) {
if (targetType.tag == TypeTags.FINITE) {
return finiteType.getValueSpace().stream()
.allMatch(expression -> isAssignableToFiniteType(targetType, (BLangLiteral) expression));
}
if (targetType.tag == TypeTags.UNION) {
List<BType> unionMemberTypes = getAllTypes(targetType);
return finiteType.getValueSpace().stream()
.allMatch(valueExpr -> unionMemberTypes.stream()
.anyMatch(targetMemType -> targetMemType.tag == TypeTags.FINITE ?
isAssignableToFiniteType(targetMemType, (BLangLiteral) valueExpr) :
isAssignable(valueExpr.type, targetType, unresolvedTypes)));
}
return finiteType.getValueSpace().stream()
.allMatch(expression -> isAssignable(expression.type, targetType, unresolvedTypes));
}
boolean isAssignableToFiniteType(BType type, BLangLiteral literalExpr) {
if (type.tag != TypeTags.FINITE) {
return false;
}
BFiniteType expType = (BFiniteType) type;
return expType.getValueSpace().stream().anyMatch(memberLiteral -> {
if (((BLangLiteral) memberLiteral).value == null) {
return literalExpr.value == null;
}
return checkLiteralAssignabilityBasedOnType((BLangLiteral) memberLiteral, literalExpr);
});
}
/**
     * Method to check literal assignability based on the types of the literals. For numeric literals the
     * assignability depends on the equivalency of the literals. The candidate literal may be either a simple
     * literal or a constant. In the case of a constant, it is assignable to the base literal if and only if both
     * literals have the same type and equivalent values.
*
* @param baseLiteral Literal based on which we check the assignability.
* @param candidateLiteral Literal to be tested whether it is assignable to the base literal or not.
* @return true if assignable; false otherwise.
*/
boolean checkLiteralAssignabilityBasedOnType(BLangLiteral baseLiteral, BLangLiteral candidateLiteral) {
if (baseLiteral.getKind() != candidateLiteral.getKind()) {
return false;
}
Object baseValue = baseLiteral.value;
Object candidateValue = candidateLiteral.value;
int candidateTypeTag = candidateLiteral.type.tag;
switch (baseLiteral.type.tag) {
case TypeTags.BYTE:
if (candidateTypeTag == TypeTags.BYTE || (candidateTypeTag == TypeTags.INT &&
!candidateLiteral.isConstant && isByteLiteralValue((Long) candidateValue))) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.INT:
if (candidateTypeTag == TypeTags.INT) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.SIGNED32_INT:
if (candidateTypeTag == TypeTags.INT && isSigned32LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.SIGNED16_INT:
if (candidateTypeTag == TypeTags.INT && isSigned16LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.SIGNED8_INT:
if (candidateTypeTag == TypeTags.INT && isSigned8LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.UNSIGNED32_INT:
if (candidateTypeTag == TypeTags.INT && isUnsigned32LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.UNSIGNED16_INT:
if (candidateTypeTag == TypeTags.INT && isUnsigned16LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.UNSIGNED8_INT:
if (candidateTypeTag == TypeTags.INT && isUnsigned8LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.FLOAT:
String baseValueStr = String.valueOf(baseValue);
String originalValue = baseLiteral.originalValue != null ? baseLiteral.originalValue : baseValueStr;
if (NumericLiteralSupport.isDecimalDiscriminated(originalValue)) {
return false;
}
double baseDoubleVal = Double.parseDouble(baseValueStr);
double candidateDoubleVal;
if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {
candidateDoubleVal = ((Long) candidateValue).doubleValue();
return baseDoubleVal == candidateDoubleVal;
} else if (candidateTypeTag == TypeTags.FLOAT) {
candidateDoubleVal = Double.parseDouble(String.valueOf(candidateValue));
return baseDoubleVal == candidateDoubleVal;
}
break;
case TypeTags.DECIMAL:
BigDecimal baseDecimalVal = NumericLiteralSupport.parseBigDecimal(baseValue);
BigDecimal candidateDecimalVal;
if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {
candidateDecimalVal = new BigDecimal((long) candidateValue, MathContext.DECIMAL128);
return baseDecimalVal.compareTo(candidateDecimalVal) == 0;
} else if (candidateTypeTag == TypeTags.FLOAT && !candidateLiteral.isConstant ||
candidateTypeTag == TypeTags.DECIMAL) {
if (NumericLiteralSupport.isFloatDiscriminated(String.valueOf(candidateValue))) {
return false;
}
candidateDecimalVal = NumericLiteralSupport.parseBigDecimal(candidateValue);
return baseDecimalVal.compareTo(candidateDecimalVal) == 0;
}
break;
default:
return baseValue.equals(candidateValue);
}
return false;
}
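// Illustrative example based on the branches above: for a float base literal 1.0 and a non-constant
// int candidate literal 1, both evaluate to 1.0, so the candidate is assignable; the byte and the
// bounded int subtypes additionally require the candidate int value to be within the respective range.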
boolean isByteLiteralValue(Long longObject) {
return (longObject.intValue() >= BBYTE_MIN_VALUE && longObject.intValue() <= BBYTE_MAX_VALUE);
}
boolean isSigned32LiteralValue(Long longObject) {
return (longObject >= SIGNED32_MIN_VALUE && longObject <= SIGNED32_MAX_VALUE);
}
boolean isSigned16LiteralValue(Long longObject) {
return (longObject.intValue() >= SIGNED16_MIN_VALUE && longObject.intValue() <= SIGNED16_MAX_VALUE);
}
boolean isSigned8LiteralValue(Long longObject) {
return (longObject.intValue() >= SIGNED8_MIN_VALUE && longObject.intValue() <= SIGNED8_MAX_VALUE);
}
boolean isUnsigned32LiteralValue(Long longObject) {
return (longObject >= 0 && longObject <= UNSIGNED32_MAX_VALUE);
}
boolean isUnsigned16LiteralValue(Long longObject) {
return (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED16_MAX_VALUE);
}
boolean isUnsigned8LiteralValue(Long longObject) {
return (longObject.intValue() >= 0 && longObject.intValue() <= UNSIGNED8_MAX_VALUE);
}
boolean isCharLiteralValue(String literal) {
return (literal.codePoints().count() == 1);
}
/**
* Method to retrieve a type representing all the values in the value space of a finite type that are assignable to
* the target type.
*
* @param finiteType the finite type
* @param targetType the target type
* @return a new finite type if at least one value in the value space of the specified finiteType is
* assignable to targetType (the same if all are assignable), else semanticError
*/
BType getTypeForFiniteTypeValuesAssignableToType(BFiniteType finiteType, BType targetType) {
if (isAssignable(finiteType, targetType)) {
return finiteType;
}
Set<BLangExpression> matchingValues = finiteType.getValueSpace().stream()
.filter(
expr -> isAssignable(expr.type, targetType) ||
isAssignableToFiniteType(targetType, (BLangLiteral) expr) ||
(targetType.tag == TypeTags.UNION &&
((BUnionType) targetType).getMemberTypes().stream()
.filter(memType -> memType.tag == TypeTags.FINITE)
.anyMatch(filteredType -> isAssignableToFiniteType(filteredType,
(BLangLiteral) expr))))
.collect(Collectors.toSet());
if (matchingValues.isEmpty()) {
return symTable.semanticError;
}
BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, finiteType.tsymbol.flags,
names.fromString("$anonType$" + UNDERSCORE + finiteTypeCount++),
finiteType.tsymbol.pkgID, null,
finiteType.tsymbol.owner, finiteType.tsymbol.pos,
VIRTUAL);
BFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, matchingValues);
finiteTypeSymbol.type = intersectingFiniteType;
return intersectingFiniteType;
}
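// Illustrative example: for a finite type with value space 1|2|"x" and target type int, this returns a
// new finite type with value space {1, 2}; if the whole finite type is assignable it is returned as is,
// and if no value is assignable, semanticError is returned.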
/**
* Method to retrieve a type representing all the member types of a union type that are assignable to
* the target type.
*
* @param unionType the union type
* @param targetType the target type
* @param env the symbol environment
* @param intersectionContext the intersection context to use when computing member intersections
* @return a single type or a new union type if at least one member type of the union type is
* assignable to targetType, else semanticError
*/
BType getTypeForUnionTypeMembersAssignableToType(BUnionType unionType, BType targetType, SymbolEnv env,
IntersectionContext intersectionContext) {
List<BType> intersection = new LinkedList<>();
unionType.getMemberTypes().forEach(memType -> {
BType memberIntersectionType = getTypeIntersection(intersectionContext, memType, targetType, env);
if (memberIntersectionType != symTable.semanticError) {
intersection.add(memberIntersectionType);
}
});
if (intersection.isEmpty()) {
return symTable.semanticError;
}
if (intersection.size() == 1) {
return intersection.get(0);
} else {
return BUnionType.create(null, new LinkedHashSet<>(intersection));
}
}
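// Illustrative example: for the union int|string|boolean and the target type int|string, the member-wise
// intersections yield {int, string}, so the result is the union int|string; members with no intersection
// (boolean here) are dropped, and semanticError is returned only if every member is dropped.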
boolean validEqualityIntersectionExists(BType lhsType, BType rhsType) {
if (!isPureType(lhsType) || !isPureType(rhsType)) {
return false;
}
if (isAssignable(lhsType, rhsType) || isAssignable(rhsType, lhsType)) {
return true;
}
Set<BType> lhsTypes = expandAndGetMemberTypesRecursive(lhsType);
Set<BType> rhsTypes = expandAndGetMemberTypesRecursive(rhsType);
return equalityIntersectionExists(lhsTypes, rhsTypes);
}
private boolean equalityIntersectionExists(Set<BType> lhsTypes, Set<BType> rhsTypes) {
if ((lhsTypes.contains(symTable.anydataType) &&
rhsTypes.stream().anyMatch(type -> type.tag != TypeTags.ERROR)) ||
(rhsTypes.contains(symTable.anydataType) &&
lhsTypes.stream().anyMatch(type -> type.tag != TypeTags.ERROR))) {
return true;
}
boolean matchFound = lhsTypes
.stream()
.anyMatch(s -> rhsTypes
.stream()
.anyMatch(t -> isSameType(s, t)));
if (!matchFound) {
matchFound = equalityIntersectionExistsForComplexTypes(lhsTypes, rhsTypes);
}
return matchFound;
}
/**
* Retrieves member types of the specified type, expanding maps/arrays of/constrained by union types to
* individual maps/arrays.
*
* e.g., (string|int)[] would result in three entries: string[], int[], (string|int)[]
*
* @param bType the type for which member types need to be identified
* @return a set containing all the retrieved member types
*/
public Set<BType> expandAndGetMemberTypesRecursive(BType bType) {
Set<BType> memberTypes = new LinkedHashSet<>();
switch (bType.tag) {
case TypeTags.BYTE:
case TypeTags.INT:
memberTypes.add(symTable.intType);
memberTypes.add(symTable.byteType);
break;
case TypeTags.FINITE:
BFiniteType expType = (BFiniteType) bType;
expType.getValueSpace().forEach(value -> {
memberTypes.add(value.type);
});
break;
case TypeTags.UNION:
BUnionType unionType = (BUnionType) bType;
unionType.getMemberTypes().forEach(member -> {
memberTypes.addAll(expandAndGetMemberTypesRecursive(member));
});
break;
case TypeTags.ARRAY:
BType arrayElementType = ((BArrayType) bType).getElementType();
if (((BArrayType) bType).getSize() != -1) {
memberTypes.add(new BArrayType(arrayElementType));
}
if (arrayElementType.tag == TypeTags.UNION) {
Set<BType> elementUnionTypes = expandAndGetMemberTypesRecursive(arrayElementType);
elementUnionTypes.forEach(elementUnionType -> {
memberTypes.add(new BArrayType(elementUnionType));
});
}
memberTypes.add(bType);
break;
case TypeTags.MAP:
BType mapConstraintType = ((BMapType) bType).getConstraint();
if (mapConstraintType.tag == TypeTags.UNION) {
Set<BType> constraintUnionTypes = expandAndGetMemberTypesRecursive(mapConstraintType);
constraintUnionTypes.forEach(constraintUnionType -> {
memberTypes.add(new BMapType(TypeTags.MAP, constraintUnionType, symTable.mapType.tsymbol));
});
}
memberTypes.add(bType);
break;
default:
memberTypes.add(bType);
}
return memberTypes;
}
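// Illustrative example for the map case: map<string|int> expands to map<string>, map<int> and
// map<string|int> itself, mirroring the array example given in the method documentation above.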
private boolean tupleIntersectionExists(BTupleType lhsType, BTupleType rhsType) {
if (lhsType.getTupleTypes().size() != rhsType.getTupleTypes().size()) {
return false;
}
List<BType> lhsMemberTypes = lhsType.getTupleTypes();
List<BType> rhsMemberTypes = rhsType.getTupleTypes();
for (int i = 0; i < lhsType.getTupleTypes().size(); i++) {
if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberTypes.get(i)),
expandAndGetMemberTypesRecursive(rhsMemberTypes.get(i)))) {
return false;
}
}
return true;
}
private boolean equalityIntersectionExistsForComplexTypes(Set<BType> lhsTypes, Set<BType> rhsTypes) {
for (BType lhsMemberType : lhsTypes) {
switch (lhsMemberType.tag) {
case TypeTags.INT:
case TypeTags.STRING:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.BOOLEAN:
case TypeTags.NIL:
if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {
return true;
}
break;
case TypeTags.JSON:
if (jsonEqualityIntersectionExists(rhsTypes)) {
return true;
}
break;
case TypeTags.TUPLE:
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&
tupleIntersectionExists((BTupleType) lhsMemberType, (BTupleType) rhsMemberType))) {
return true;
}
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&
arrayTupleEqualityIntersectionExists((BArrayType) rhsMemberType,
(BTupleType) lhsMemberType))) {
return true;
}
break;
case TypeTags.ARRAY:
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&
equalityIntersectionExists(
expandAndGetMemberTypesRecursive(((BArrayType) lhsMemberType).eType),
expandAndGetMemberTypesRecursive(((BArrayType) rhsMemberType).eType)))) {
return true;
}
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&
arrayTupleEqualityIntersectionExists((BArrayType) lhsMemberType,
(BTupleType) rhsMemberType))) {
return true;
}
break;
case TypeTags.MAP:
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&
equalityIntersectionExists(
expandAndGetMemberTypesRecursive(((BMapType) lhsMemberType).constraint),
expandAndGetMemberTypesRecursive(((BMapType) rhsMemberType).constraint)))) {
return true;
}
if (!isAssignable(((BMapType) lhsMemberType).constraint, symTable.errorType) &&
rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {
return true;
}
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&
mapRecordEqualityIntersectionExists((BMapType) lhsMemberType,
(BRecordType) rhsMemberType))) {
return true;
}
break;
case TypeTags.OBJECT:
case TypeTags.RECORD:
if (rhsTypes.stream().anyMatch(
rhsMemberType -> checkStructEquivalency(rhsMemberType, lhsMemberType) ||
checkStructEquivalency(lhsMemberType, rhsMemberType))) {
return true;
}
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&
recordEqualityIntersectionExists((BRecordType) lhsMemberType,
(BRecordType) rhsMemberType))) {
return true;
}
if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON) &&
jsonEqualityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberType))) {
return true;
}
if (rhsTypes.stream().anyMatch(
rhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&
mapRecordEqualityIntersectionExists((BMapType) rhsMemberType,
(BRecordType) lhsMemberType))) {
return true;
}
break;
}
}
return false;
}
private boolean arrayTupleEqualityIntersectionExists(BArrayType arrayType, BTupleType tupleType) {
Set<BType> elementTypes = expandAndGetMemberTypesRecursive(arrayType.eType);
return tupleType.tupleTypes.stream()
.allMatch(tupleMemType -> equalityIntersectionExists(elementTypes,
expandAndGetMemberTypesRecursive(tupleMemType)));
}
private boolean recordEqualityIntersectionExists(BRecordType lhsType, BRecordType rhsType) {
Map<String, BField> lhsFields = lhsType.fields;
Map<String, BField> rhsFields = rhsType.fields;
List<Name> matchedFieldNames = new ArrayList<>();
for (BField lhsField : lhsFields.values()) {
if (rhsFields.containsKey(lhsField.name.value)) {
if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),
expandAndGetMemberTypesRecursive(
rhsFields.get(lhsField.name.value).type))) {
return false;
}
matchedFieldNames.add(lhsField.getName());
} else {
if (Symbols.isFlagOn(lhsField.symbol.flags, Flags.OPTIONAL)) {
break;
}
if (rhsType.sealed) {
return false;
}
if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),
expandAndGetMemberTypesRecursive(rhsType.restFieldType))) {
return false;
}
}
}
for (BField rhsField : rhsFields.values()) {
if (matchedFieldNames.contains(rhsField.getName())) {
continue;
}
if (!Symbols.isFlagOn(rhsField.symbol.flags, Flags.OPTIONAL)) {
if (lhsType.sealed) {
return false;
}
if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(rhsField.type),
expandAndGetMemberTypesRecursive(lhsType.restFieldType))) {
return false;
}
}
}
return true;
}
private boolean mapRecordEqualityIntersectionExists(BMapType mapType, BRecordType recordType) {
Set<BType> mapConstrTypes = expandAndGetMemberTypesRecursive(mapType.getConstraint());
for (BField field : recordType.fields.values()) {
if (!Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) &&
!equalityIntersectionExists(mapConstrTypes, expandAndGetMemberTypesRecursive(field.type))) {
return false;
}
}
return true;
}
private boolean jsonEqualityIntersectionExists(Set<BType> typeSet) {
for (BType type : typeSet) {
switch (type.tag) {
case TypeTags.MAP:
if (!isAssignable(((BMapType) type).constraint, symTable.errorType)) {
return true;
}
break;
case TypeTags.RECORD:
BRecordType recordType = (BRecordType) type;
if (recordType.fields.values().stream()
.allMatch(field -> Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) ||
!isAssignable(field.type, symTable.errorType))) {
return true;
}
break;
default:
if (isAssignable(type, symTable.jsonType)) {
return true;
}
}
}
return false;
}
public BType getRemainingMatchExprType(BType originalType, BType typeToRemove) {
switch (originalType.tag) {
case TypeTags.UNION:
return getRemainingType((BUnionType) originalType, getAllTypes(typeToRemove));
case TypeTags.FINITE:
return getRemainingType((BFiniteType) originalType, getAllTypes(typeToRemove));
case TypeTags.TUPLE:
return getRemainingType((BTupleType) originalType, typeToRemove);
default:
return originalType;
}
}
private BType getRemainingType(BTupleType originalType, BType typeToRemove) {
switch (typeToRemove.tag) {
case TypeTags.TUPLE:
return getRemainingType(originalType, (BTupleType) typeToRemove);
case TypeTags.ARRAY:
return getRemainingType(originalType, (BArrayType) typeToRemove);
default:
return originalType;
}
}
private BType getRemainingType(BTupleType originalType, BTupleType typeToRemove) {
if (originalType.restType != null) {
return originalType;
}
List<BType> originalTupleTypes = new ArrayList<>(originalType.tupleTypes);
List<BType> typesToRemove = new ArrayList<>(typeToRemove.tupleTypes);
if (originalTupleTypes.size() < typesToRemove.size()) {
return originalType;
}
List<BType> tupleTypes = new ArrayList<>();
for (int i = 0; i < originalTupleTypes.size(); i++) {
tupleTypes.add(getRemainingMatchExprType(originalTupleTypes.get(i), typesToRemove.get(i)));
}
if (typeToRemove.restType == null) {
return new BTupleType(tupleTypes);
}
if (originalTupleTypes.size() == typesToRemove.size()) {
return originalType;
}
for (int i = typesToRemove.size(); i < originalTupleTypes.size(); i++) {
tupleTypes.add(getRemainingMatchExprType(originalTupleTypes.get(i), typeToRemove.restType));
}
return new BTupleType(tupleTypes);
}
private BType getRemainingType(BTupleType originalType, BArrayType typeToRemove) {
BType eType = typeToRemove.eType;
List<BType> tupleTypes = new ArrayList<>();
for (BType tupleType : originalType.tupleTypes) {
tupleTypes.add(getRemainingMatchExprType(tupleType, eType));
}
BTupleType remainingType = new BTupleType(tupleTypes);
if (originalType.restType != null) {
remainingType.restType = getRemainingMatchExprType(originalType.restType, eType);
}
return remainingType;
}
public BType getRemainingType(BType originalType, BType typeToRemove) {
switch (originalType.tag) {
case TypeTags.UNION:
return getRemainingType((BUnionType) originalType, getAllTypes(typeToRemove));
case TypeTags.FINITE:
return getRemainingType((BFiniteType) originalType, getAllTypes(typeToRemove));
case TypeTags.READONLY:
return getRemainingType((BReadonlyType) originalType, typeToRemove);
default:
return originalType;
}
}
private BType getRemainingType(BReadonlyType originalType, BType removeType) {
if (removeType.tag == TypeTags.ERROR) {
return symTable.anyAndReadonly;
}
return originalType;
}
public BType getTypeIntersection(IntersectionContext intersectionContext, BType lhsType, BType rhsType,
SymbolEnv env) {
List<BType> rhsTypeComponents = getAllTypes(rhsType);
LinkedHashSet<BType> intersection = new LinkedHashSet<>();
for (BType rhsComponent : rhsTypeComponents) {
BType it = getIntersection(intersectionContext, lhsType, env, rhsComponent);
if (it != null) {
intersection.add(it);
}
}
if (intersection.isEmpty()) {
if (lhsType.tag == TypeTags.NULL_SET) {
return lhsType;
}
return symTable.semanticError;
}
if (intersection.size() == 1) {
return intersection.toArray(new BType[0])[0];
} else {
return BUnionType.create(null, intersection);
}
}
private BType getIntersection(IntersectionContext intersectionContext, BType lhsType, SymbolEnv env, BType type) {
if (intersectionContext.preferNonGenerativeIntersection) {
if (isAssignable(type, lhsType)) {
return type;
} else if (isAssignable(lhsType, type)) {
return lhsType;
}
}
if (type.tag == TypeTags.ERROR && lhsType.tag == TypeTags.ERROR) {
BType intersectionType = getIntersectionForErrorTypes(intersectionContext, lhsType, type, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.RECORD && lhsType.tag == TypeTags.RECORD) {
BType intersectionType = createRecordIntersection(intersectionContext, (BRecordType) lhsType,
(BRecordType) type, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.MAP && lhsType.tag == TypeTags.RECORD) {
BType intersectionType = createRecordAndMapIntersection(intersectionContext.switchLeft(),
lhsType, type, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.RECORD && lhsType.tag == TypeTags.MAP) {
BType intersectionType = createRecordAndMapIntersection(intersectionContext.switchRight(),
type, lhsType, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (!intersectionContext.preferNonGenerativeIntersection && isAssignable(type, lhsType)) {
return type;
} else if (!intersectionContext.preferNonGenerativeIntersection && isAssignable(lhsType, type)) {
return lhsType;
} else if (lhsType.tag == TypeTags.FINITE) {
BType intersectionType = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) lhsType, type);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.FINITE) {
BType intersectionType = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) type, lhsType);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (lhsType.tag == TypeTags.UNION) {
BType intersectionType = getTypeForUnionTypeMembersAssignableToType((BUnionType) lhsType, type, env,
intersectionContext);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.UNION) {
BType intersectionType = getTypeForUnionTypeMembersAssignableToType((BUnionType) type, lhsType, env,
intersectionContext);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.NULL_SET) {
return type;
} else if (type.tag == TypeTags.MAP && lhsType.tag == TypeTags.MAP) {
BType intersectionType = createRecordAndMapIntersection(intersectionContext,
type, lhsType, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.ARRAY && lhsType.tag == TypeTags.TUPLE) {
BType intersectionType = createArrayAndTupleIntersection(intersectionContext,
(BArrayType) type, (BTupleType) lhsType, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
} else if (type.tag == TypeTags.TUPLE && lhsType.tag == TypeTags.ARRAY) {
BType intersectionType = createArrayAndTupleIntersection(intersectionContext,
(BArrayType) lhsType, (BTupleType) type, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
}
return null;
}
private BType createArrayAndTupleIntersection(IntersectionContext intersectionContext,
BArrayType arrayType, BTupleType tupleType, SymbolEnv env) {
List<BType> tupleMemberTypes = new ArrayList<>();
for (BType memberType : tupleType.tupleTypes) {
BType intersectionType = getTypeIntersection(intersectionContext, memberType, arrayType.eType, env);
if (intersectionType == symTable.semanticError) {
return symTable.semanticError;
}
tupleMemberTypes.add(intersectionType);
}
if (tupleType.restType == null) {
return new BTupleType(null, tupleMemberTypes);
}
BType restIntersectionType = getTypeIntersection(intersectionContext,
tupleType.restType, arrayType.eType, env);
if (restIntersectionType == symTable.semanticError) {
return new BTupleType(null, tupleMemberTypes);
}
return new BTupleType(null, tupleMemberTypes, restIntersectionType, 0);
}
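// Illustrative example: intersecting the array type int[] with the tuple [int|string, int|float]
// intersects each tuple member with the element type and yields [int, int]; if any member has no
// intersection with the element type, semanticError is returned.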
private BType getIntersectionForErrorTypes(IntersectionContext intersectionContext,
BType lhsType, BType rhsType, SymbolEnv env) {
BType detailTypeOne = ((BErrorType) lhsType).detailType;
BType detailTypeTwo = ((BErrorType) rhsType).detailType;
if (!intersectionContext.compilerInternalIntersectionTest
&& (isSealedRecord(detailTypeOne) || isSealedRecord(detailTypeTwo))) {
return symTable.semanticError;
}
BType detailIntersectionType = getTypeIntersection(intersectionContext, detailTypeOne, detailTypeTwo, env);
if (detailIntersectionType == symTable.semanticError) {
return symTable.semanticError;
}
BErrorType intersectionErrorType = createErrorType(lhsType, rhsType, detailIntersectionType, env);
if (!intersectionContext.compilerInternalIntersectionTest) {
BTypeSymbol errorTSymbol = intersectionErrorType.tsymbol;
BLangErrorType bLangErrorType = TypeDefBuilderHelper.createBLangErrorType(symTable.builtinPos,
intersectionErrorType, env, anonymousModelHelper);
BLangTypeDefinition errorTypeDefinition = TypeDefBuilderHelper.addTypeDefinition(
intersectionErrorType, errorTSymbol, bLangErrorType, env);
errorTypeDefinition.pos = symTable.builtinPos;
}
return intersectionErrorType;
}
private BType createRecordIntersection(IntersectionContext diagnosticContext,
BRecordType recordTypeOne, BRecordType recordTypeTwo, SymbolEnv env) {
BRecordType newType = createAnonymousRecord(env);
if (!populateRecordFields(diagnosticContext.switchLeft(), newType, recordTypeOne, env,
getConstraint(recordTypeTwo)) ||
!populateRecordFields(diagnosticContext.switchRight(), newType, recordTypeTwo, env,
getConstraint(recordTypeOne))) {
return symTable.semanticError;
}
newType.restFieldType = getTypeIntersection(diagnosticContext, recordTypeOne.restFieldType,
recordTypeTwo.restFieldType, env);
if (newType.restFieldType == symTable.semanticError) {
return symTable.semanticError;
}
if (!diagnosticContext.compilerInternalIntersectionTest) {
BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(
newType, env.enclPkg.packageID, symTable, symTable.builtinPos);
BLangTypeDefinition recordTypeDef = TypeDefBuilderHelper.addTypeDefinition(
newType, newType.tsymbol, recordTypeNode, env);
env.enclPkg.symbol.scope.define(newType.tsymbol.name, newType.tsymbol);
recordTypeDef.pos = symTable.builtinPos;
}
return newType;
}
private BType getConstraint(BRecordType recordType) {
if (recordType.sealed) {
return symTable.neverType;
}
return recordType.restFieldType;
}
private BRecordType createAnonymousRecord(SymbolEnv env) {
EnumSet<Flag> flags = EnumSet.of(Flag.PUBLIC, Flag.ANONYMOUS);
BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.asMask(flags), Names.EMPTY,
env.enclPkg.packageID, null,
env.scope.owner, null, VIRTUAL);
recordSymbol.name = names.fromString(
anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID));
BInvokableType bInvokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, bInvokableType, env.scope.owner, false,
symTable.builtinPos, VIRTUAL);
initFuncSymbol.retType = symTable.nilType;
recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol,
bInvokableType, symTable.builtinPos);
recordSymbol.scope = new Scope(recordSymbol);
BRecordType recordType = new BRecordType(recordSymbol);
recordType.tsymbol = recordSymbol;
recordSymbol.type = recordType;
return recordType;
}
private BType createRecordAndMapIntersection(IntersectionContext intersectionContext,
BType type, BType mapType, SymbolEnv env) {
BRecordType intersectionRecord = createAnonymousRecord(env);
if (!populateRecordFields(intersectionContext, intersectionRecord, type, env,
((BMapType) mapType).constraint)) {
return symTable.semanticError;
}
if (intersectionContext.compilerInternalIntersectionTest && ((BRecordType) type).sealed) {
return intersectionRecord;
}
intersectionRecord.restFieldType = getRestFieldIntersectionType(intersectionContext,
type, (BMapType) mapType, env);
if (intersectionRecord.restFieldType == symTable.semanticError) {
return symTable.semanticError;
}
if (!intersectionContext.compilerInternalIntersectionTest) {
BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(
intersectionRecord, env.enclPkg.packageID, symTable, symTable.builtinPos);
BLangTypeDefinition recordTypeDef = TypeDefBuilderHelper.addTypeDefinition(
intersectionRecord, intersectionRecord.tsymbol, recordTypeNode, env);
env.enclPkg.symbol.scope.define(intersectionRecord.tsymbol.name, intersectionRecord.tsymbol);
recordTypeDef.pos = symTable.builtinPos;
}
return intersectionRecord;
}
private BType getRestFieldIntersectionType(IntersectionContext intersectionContext,
BType type, BMapType mapType, SymbolEnv env) {
if (type.tag == TypeTags.RECORD) {
return getTypeIntersection(intersectionContext,
((BRecordType) type).restFieldType, mapType.constraint, env);
} else {
return getTypeIntersection(intersectionContext,
((BMapType) type).constraint, mapType.constraint, env);
}
}
private BErrorType createErrorType(BType lhsType, BType rhsType, BType detailType, SymbolEnv env) {
BErrorType lhsErrorType = (BErrorType) lhsType;
BErrorType rhsErrorType = (BErrorType) rhsType;
BErrorType errorType = createErrorType(detailType, lhsType.flags, env);
errorType.tsymbol.flags |= rhsType.flags;
errorType.typeIdSet = BTypeIdSet.getIntersection(lhsErrorType.typeIdSet, rhsErrorType.typeIdSet);
return errorType;
}
public BErrorType createErrorType(BType detailType, long flags, SymbolEnv env) {
String name = anonymousModelHelper.getNextAnonymousIntersectionErrorTypeName(env.enclPkg.packageID);
BErrorTypeSymbol errorTypeSymbol = Symbols.createErrorSymbol(flags, names.fromString(name),
env.enclPkg.symbol.pkgID, null,
env.scope.owner, symTable.builtinPos, VIRTUAL);
errorTypeSymbol.scope = new Scope(errorTypeSymbol);
BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
errorType.flags |= errorTypeSymbol.flags;
errorTypeSymbol.type = errorType;
errorType.typeIdSet = BTypeIdSet.emptySet();
return errorType;
}
private boolean populateRecordFields(IntersectionContext diagnosticContext, BRecordType newType,
BType originalType, SymbolEnv env, BType constraint) {
BTypeSymbol intersectionRecordSymbol = newType.tsymbol;
if (originalType.getKind() != TypeKind.RECORD) {
return true;
}
BRecordType originalRecordType = (BRecordType) originalType;
LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
for (BField origField : originalRecordType.fields.values()) {
org.wso2.ballerinalang.compiler.util.Name origFieldName = origField.name;
String nameString = origFieldName.value;
if (!validateRecordFieldDefaultValueForIntersection(diagnosticContext, origField, originalRecordType)) {
return false;
}
BType recordFieldType = validateRecordField(diagnosticContext, newType, origField, constraint, env);
if (recordFieldType == symTable.semanticError) {
return false;
}
BVarSymbol recordFieldSymbol = new BVarSymbol(origField.symbol.flags, origFieldName,
env.enclPkg.packageID, recordFieldType,
intersectionRecordSymbol, origField.pos, SOURCE);
if (recordFieldType.tag == TypeTags.INVOKABLE && recordFieldType.tsymbol != null) {
BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) recordFieldType.tsymbol;
BInvokableSymbol invokableSymbol = (BInvokableSymbol) recordFieldSymbol;
invokableSymbol.params = tsymbol.params;
invokableSymbol.restParam = tsymbol.restParam;
invokableSymbol.retType = tsymbol.returnType;
invokableSymbol.flags = tsymbol.flags;
}
fields.put(nameString, new BField(origFieldName, null, recordFieldSymbol));
intersectionRecordSymbol.scope.define(origFieldName, recordFieldSymbol);
}
newType.fields.putAll(fields);
return true;
}
private boolean validateRecordFieldDefaultValueForIntersection(IntersectionContext diagnosticContext,
BField field, BRecordType recordType) {
if (field.symbol != null && field.symbol.isDefaultable && !diagnosticContext.compilerInternalIntersectionTest) {
diagnosticContext.logError(DiagnosticErrorCode.INTERSECTION_NOT_ALLOWED_WITH_TYPE, recordType, field.name);
return false;
}
return true;
}
private BType validateRecordField(IntersectionContext intersectionContext,
BRecordType newType, BField origField, BType constraint, SymbolEnv env) {
if (hasField(newType, origField)) {
return validateOverlappingFields(newType, origField);
}
if (constraint == null) {
return origField.type;
}
BType fieldType = getTypeIntersection(intersectionContext, origField.type, constraint, env);
if (fieldType != symTable.semanticError) {
return fieldType;
}
if (Symbols.isOptional(origField.symbol)) {
return null;
}
return symTable.semanticError;
}
private boolean hasField(BRecordType recordType, BField origField) {
return recordType.fields.containsKey(origField.name.value);
}
private BType validateOverlappingFields(BRecordType newType, BField origField) {
if (!hasField(newType, origField)) {
return origField.type;
}
BField overlappingField = newType.fields.get(origField.name.value);
if (isAssignable(overlappingField.type, origField.type)) {
return overlappingField.type;
}
if (isAssignable(origField.type, overlappingField.type)) {
return origField.type;
}
return symTable.semanticError;
}
private void removeErrorFromReadonlyType(List<BType> remainingTypes) {
Iterator<BType> remainingIterator = remainingTypes.listIterator();
boolean addAnyAndReadOnly = false;
while (remainingIterator.hasNext()) {
BType remainingType = remainingIterator.next();
if (remainingType.tag != TypeTags.READONLY) {
continue;
}
remainingIterator.remove();
addAnyAndReadOnly = true;
}
if (addAnyAndReadOnly) {
remainingTypes.add(symTable.anyAndReadonly);
}
}
private BType getRemainingType(BUnionType originalType, List<BType> removeTypes) {
List<BType> remainingTypes = getAllTypes(originalType);
boolean hasErrorToRemove = false;
for (BType removeType : removeTypes) {
remainingTypes.removeIf(type -> isAssignable(type, removeType));
if (!hasErrorToRemove && removeType.tag == TypeTags.ERROR) {
hasErrorToRemove = true;
}
}
if (hasErrorToRemove) {
removeErrorFromReadonlyType(remainingTypes);
}
List<BType> finiteTypesToRemove = new ArrayList<>();
List<BType> finiteTypesToAdd = new ArrayList<>();
for (BType remainingType : remainingTypes) {
if (remainingType.tag == TypeTags.FINITE) {
BFiniteType finiteType = (BFiniteType) remainingType;
finiteTypesToRemove.add(finiteType);
BType remainingTypeWithMatchesRemoved = getRemainingType(finiteType, removeTypes);
if (remainingTypeWithMatchesRemoved != symTable.semanticError) {
finiteTypesToAdd.add(remainingTypeWithMatchesRemoved);
}
}
}
remainingTypes.removeAll(finiteTypesToRemove);
remainingTypes.addAll(finiteTypesToAdd);
if (remainingTypes.size() == 1) {
return remainingTypes.get(0);
}
if (remainingTypes.isEmpty()) {
return symTable.nullSet;
}
return BUnionType.create(null, new LinkedHashSet<>(remainingTypes));
}
private BType getRemainingType(BFiniteType originalType, List<BType> removeTypes) {
Set<BLangExpression> remainingValueSpace = new LinkedHashSet<>();
for (BLangExpression valueExpr : originalType.getValueSpace()) {
boolean matchExists = false;
for (BType remType : removeTypes) {
if (isAssignable(valueExpr.type, remType) ||
isAssignableToFiniteType(remType, (BLangLiteral) valueExpr)) {
matchExists = true;
break;
}
}
if (!matchExists) {
remainingValueSpace.add(valueExpr);
}
}
if (remainingValueSpace.isEmpty()) {
return symTable.semanticError;
}
BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, originalType.tsymbol.flags,
names.fromString("$anonType$" + UNDERSCORE + finiteTypeCount++),
originalType.tsymbol.pkgID, null,
originalType.tsymbol.owner, originalType.tsymbol.pos,
VIRTUAL);
BFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, remainingValueSpace);
finiteTypeSymbol.type = intersectingFiniteType;
return intersectingFiniteType;
}
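// Illustrative example: removing string from the finite type 1|2|"x" leaves a new finite type with
// value space {1, 2}; if every value in the value space is removed, semanticError is returned.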
public BType getSafeType(BType type, boolean liftNil, boolean liftError) {
switch (type.tag) {
case TypeTags.JSON:
return new BJSONType((BJSONType) type, false);
case TypeTags.ANY:
return new BAnyType(type.tag, type.tsymbol, false);
case TypeTags.ANYDATA:
return new BAnydataType((BAnydataType) type, false);
case TypeTags.READONLY:
return new BReadonlyType(type.tag, type.tsymbol, false);
}
if (type.tag != TypeTags.UNION) {
return type;
}
BUnionType unionType = (BUnionType) type;
LinkedHashSet<BType> memTypes = new LinkedHashSet<>(unionType.getMemberTypes());
BUnionType errorLiftedType = BUnionType.create(null, memTypes);
if (liftNil) {
errorLiftedType.remove(symTable.nilType);
}
if (liftError) {
errorLiftedType.remove(symTable.errorType);
}
if (errorLiftedType.getMemberTypes().size() == 1) {
return errorLiftedType.getMemberTypes().toArray(new BType[0])[0];
}
return errorLiftedType;
}
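// Illustrative example: getSafeType on int|string|error|() with both liftNil and liftError set removes
// () and error from the member set and returns the union int|string; if only one member remains, that
// single type is returned instead of a union.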
public List<BType> getAllTypes(BType type) {
if (type.tag != TypeTags.UNION) {
return Lists.of(type);
}
List<BType> memberTypes = new ArrayList<>();
((BUnionType) type).getMemberTypes().forEach(memberType -> memberTypes.addAll(getAllTypes(memberType)));
return memberTypes;
}
public boolean isAllowedConstantType(BType type) {
switch (type.tag) {
case TypeTags.BOOLEAN:
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.STRING:
case TypeTags.NIL:
return true;
case TypeTags.MAP:
return isAllowedConstantType(((BMapType) type).constraint);
case TypeTags.FINITE:
BLangExpression finiteValue = ((BFiniteType) type).getValueSpace().toArray(new BLangExpression[0])[0];
return isAllowedConstantType(finiteValue.type);
default:
return false;
}
}
public boolean isValidLiteral(BLangLiteral literal, BType targetType) {
BType literalType = literal.type;
if (literalType.tag == targetType.tag) {
return true;
}
switch (targetType.tag) {
case TypeTags.BYTE:
return literalType.tag == TypeTags.INT && isByteLiteralValue((Long) literal.value);
case TypeTags.DECIMAL:
return literalType.tag == TypeTags.FLOAT || literalType.tag == TypeTags.INT;
case TypeTags.FLOAT:
return literalType.tag == TypeTags.INT;
case TypeTags.SIGNED32_INT:
return literalType.tag == TypeTags.INT && isSigned32LiteralValue((Long) literal.value);
case TypeTags.SIGNED16_INT:
return literalType.tag == TypeTags.INT && isSigned16LiteralValue((Long) literal.value);
case TypeTags.SIGNED8_INT:
return literalType.tag == TypeTags.INT && isSigned8LiteralValue((Long) literal.value);
case TypeTags.UNSIGNED32_INT:
return literalType.tag == TypeTags.INT && isUnsigned32LiteralValue((Long) literal.value);
case TypeTags.UNSIGNED16_INT:
return literalType.tag == TypeTags.INT && isUnsigned16LiteralValue((Long) literal.value);
case TypeTags.UNSIGNED8_INT:
return literalType.tag == TypeTags.INT && isUnsigned8LiteralValue((Long) literal.value);
case TypeTags.CHAR_STRING:
return literalType.tag == TypeTags.STRING && isCharLiteralValue((String) literal.value);
default:
return false;
}
}
/**
* Validate if the return type of the given function is a subtype of `error?`, containing `()`.
*
* @param function The function of which the return type should be validated
* @param diagnosticCode The code to log if the return type is invalid
*/
public void validateErrorOrNilReturn(BLangFunction function, DiagnosticCode diagnosticCode) {
BType returnType = function.returnTypeNode.type;
if (returnType.tag == TypeTags.NIL) {
return;
}
if (returnType.tag == TypeTags.UNION) {
Set<BType> memberTypes = ((BUnionType) returnType).getMemberTypes();
if (returnType.isNullable() &&
memberTypes.stream().allMatch(type -> type.tag == TypeTags.NIL || type.tag == TypeTags.ERROR)) {
return;
}
}
dlog.error(function.returnTypeNode.pos, diagnosticCode, function.returnTypeNode.type.toString());
}
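// Illustrative example: return types () and error? pass this validation, whereas int? or a bare error
// return type causes the given diagnostic to be logged, since every union member must be nil or error
// and the return type itself must be nilable.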
/**
* Type vector of size two, to hold the source and the target types.
*
* @since 0.982.0
*/
private static class TypePair {
BType sourceType;
BType targetType;
public TypePair(BType sourceType, BType targetType) {
this.sourceType = sourceType;
this.targetType = targetType;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof TypePair)) {
return false;
}
TypePair other = (TypePair) obj;
return this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType);
}
@Override
public int hashCode() {
return Objects.hash(sourceType, targetType);
}
}
/**
* A functional interface for parameterizing the type of type checking that needs to be done on the source and
* target types.
*
* @since 0.995.0
*/
private interface TypeEqualityPredicate {
boolean test(BType source, BType target, Set<TypePair> unresolvedTypes);
}
public boolean hasFillerValue(BType type) {
switch (type.tag) {
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.STRING:
case TypeTags.BOOLEAN:
case TypeTags.JSON:
case TypeTags.XML:
case TypeTags.NIL:
case TypeTags.TABLE:
case TypeTags.ANYDATA:
case TypeTags.MAP:
case TypeTags.ANY:
case TypeTags.NEVER:
return true;
case TypeTags.ARRAY:
return checkFillerValue((BArrayType) type);
case TypeTags.FINITE:
return checkFillerValue((BFiniteType) type);
case TypeTags.UNION:
return checkFillerValue((BUnionType) type);
case TypeTags.OBJECT:
return checkFillerValue((BObjectType) type);
case TypeTags.RECORD:
return checkFillerValue((BRecordType) type);
case TypeTags.TUPLE:
BTupleType tupleType = (BTupleType) type;
return tupleType.getTupleTypes().stream().allMatch(eleType -> hasFillerValue(eleType));
default:
if (TypeTags.isIntegerTypeTag(type.tag)) {
return true;
}
return false;
}
}
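// Illustrative example: the tuple [int, string] has a filler value because every member type does,
// whereas a tuple such as [int, 1|2] does not, since the finite type 1|2 has no implicit default value.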
private boolean checkFillerValue(BObjectType type) {
if ((type.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
return false;
}
BAttachedFunction initFunction = ((BObjectTypeSymbol) type.tsymbol).initializerFunc;
if (initFunction == null) {
return true;
}
if (initFunction.symbol.getReturnType().getKind() != TypeKind.NIL) {
return false;
}
for (BVarSymbol bVarSymbol : initFunction.symbol.getParameters()) {
if (!bVarSymbol.isDefaultable) {
return false;
}
}
return true;
}
/**
* This handles two kinds of finite types.
* Singleton: since a singleton has exactly one value, that value itself must be a valid filler value.
* Union of values: (1) if nil is a member, it is the filler value; (2) otherwise all values must belong to the
* same basic type and the implicit default value of that type must be a member of the value space.
* Precondition: the value space should have at least one element.
*
* @param type the finite type (a singleton or a union of values)
* @return boolean whether the type has a valid filler value or not
*/
private boolean checkFillerValue(BFiniteType type) {
if (type.isNullable()) {
return true;
}
if (type.getValueSpace().size() == 1) {
return true;
}
Iterator iterator = type.getValueSpace().iterator();
BLangExpression firstElement = (BLangExpression) iterator.next();
boolean defaultFillValuePresent = isImplicitDefaultValue(firstElement);
while (iterator.hasNext()) {
BLangExpression value = (BLangExpression) iterator.next();
if (!isSameBasicType(value.type, firstElement.type)) {
return false;
}
if (!defaultFillValuePresent && isImplicitDefaultValue(value)) {
defaultFillValuePresent = true;
}
}
return defaultFillValuePresent;
}
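// Illustrative example: the finite type 0|1|2 has a filler value because 0 is the implicit default for
// int, 1|2 does not because no implicit default is in the value space, and 1|"one" does not because the
// values belong to different basic types.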
private boolean hasImplicitDefaultValue(Set<BLangExpression> valueSpace) {
for (BLangExpression expression : valueSpace) {
if (isImplicitDefaultValue(expression)) {
return true;
}
}
return false;
}
private boolean checkFillerValue(BUnionType type) {
if (type.isNullable()) {
return true;
}
Set<BType> memberTypes = new HashSet<>();
boolean hasFillerValue = false;
boolean defaultValuePresent = false;
boolean finiteTypePresent = false;
for (BType member : type.getMemberTypes()) {
if (member.tag == TypeTags.FINITE) {
Set<BType> uniqueValues = getValueTypes(((BFiniteType) member).getValueSpace());
memberTypes.addAll(uniqueValues);
if (!defaultValuePresent && hasImplicitDefaultValue(((BFiniteType) member).getValueSpace())) {
defaultValuePresent = true;
}
finiteTypePresent = true;
} else {
memberTypes.add(member);
}
if (!hasFillerValue && hasFillerValue(member)) {
hasFillerValue = true;
}
}
if (!hasFillerValue) {
return false;
}
Iterator<BType> iterator = memberTypes.iterator();
BType firstMember = iterator.next();
while (iterator.hasNext()) {
if (!isSameBasicType(firstMember, iterator.next())) {
return false;
}
}
if (finiteTypePresent) {
return defaultValuePresent;
}
return true;
}
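// Illustrative example: byte|int has a filler value (both members are integer basic types with fillers),
// int? has one because it is nilable, while int|string does not since its members belong to different
// basic types.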
private boolean isSameBasicType(BType source, BType target) {
if (isSameType(source, target)) {
return true;
}
if (TypeTags.isIntegerTypeTag(source.tag) && TypeTags.isIntegerTypeTag(target.tag)) {
return true;
}
return false;
}
private Set<BType> getValueTypes(Set<BLangExpression> valueSpace) {
Set<BType> uniqueType = new HashSet<>();
for (BLangExpression expression : valueSpace) {
uniqueType.add(expression.type);
}
return uniqueType;
}
private boolean isImplicitDefaultValue(BLangExpression expression) {
if ((expression.getKind() == NodeKind.LITERAL) || (expression.getKind() == NodeKind.NUMERIC_LITERAL)) {
BLangLiteral literalExpression = (BLangLiteral) expression;
BType literalExprType = literalExpression.type;
Object value = literalExpression.getValue();
switch (literalExprType.getKind()) {
case INT:
case BYTE:
return value.equals(Long.valueOf(0));
case STRING:
return value == null || value.equals("");
case DECIMAL:
case FLOAT:
return value.equals(String.valueOf(0.0));
case BOOLEAN:
return value.equals(Boolean.valueOf(false));
case NIL:
return true;
default:
return false;
}
}
return false;
}
private boolean checkFillerValue(BRecordType type) {
for (BField field : type.fields.values()) {
if (Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL)) {
continue;
}
if (Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
return false;
}
}
return true;
}
private boolean checkFillerValue(BArrayType type) {
if (type.size == -1) {
return true;
}
return hasFillerValue(type.eType);
}
/**
* Get result type of the query output.
*
* @param type type of query expression.
* @return result type.
*/
public BType resolveExprType(BType type) {
switch (type.tag) {
case TypeTags.STREAM:
return ((BStreamType) type).constraint;
case TypeTags.TABLE:
return ((BTableType) type).constraint;
case TypeTags.ARRAY:
return ((BArrayType) type).eType;
case TypeTags.UNION:
List<BType> exprTypes = new ArrayList<>(((BUnionType) type).getMemberTypes());
for (BType returnType : exprTypes) {
switch (returnType.tag) {
case TypeTags.STREAM:
return ((BStreamType) returnType).constraint;
case TypeTags.TABLE:
return ((BTableType) returnType).constraint;
case TypeTags.ARRAY:
return ((BArrayType) returnType).eType;
case TypeTags.STRING:
case TypeTags.XML:
return returnType;
}
}
default:
return type;
}
}
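// Illustrative example: for a query producing stream<int> the result type is int and for int[] it is
// also int; for a union such as int[]|error, the first stream/table/array/string/xml member encountered
// determines the result type.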
private boolean isSimpleBasicType(int tag) {
switch (tag) {
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.BOOLEAN:
case TypeTags.NIL:
return true;
default:
return (TypeTags.isIntegerTypeTag(tag)) || (TypeTags.isStringTypeTag(tag));
}
}
/**
* Check whether a type is an ordered type.
*
* @param type type.
* @param hasCycle whether there is a cycle.
* @return boolean whether the type is an ordered type or not.
*/
public boolean isOrderedType(BType type, boolean hasCycle) {
switch (type.tag) {
case TypeTags.UNION:
BUnionType unionType = (BUnionType) type;
if (hasCycle) {
return true;
}
if (unionType.isCyclic) {
hasCycle = true;
}
Set<BType> memberTypes = unionType.getMemberTypes();
boolean allMembersOrdered = false;
BType firstTypeInUnion = memberTypes.iterator().next();
for (BType memType : memberTypes) {
if (memType.tag == TypeTags.FINITE && firstTypeInUnion.tag == TypeTags.FINITE) {
Set<BLangExpression> valSpace = ((BFiniteType) firstTypeInUnion).getValueSpace();
BType baseExprType = valSpace.iterator().next().type;
if (!checkValueSpaceHasSameType((BFiniteType) memType, baseExprType)) {
return false;
}
} else if (memType.tag != firstTypeInUnion.tag && memType.tag != TypeTags.NIL &&
!isIntOrStringType(memType.tag, firstTypeInUnion.tag)) {
return false;
}
allMembersOrdered = isOrderedType(memType, hasCycle);
if (!allMembersOrdered) {
break;
}
}
return allMembersOrdered;
case TypeTags.ARRAY:
BType elementType = ((BArrayType) type).eType;
return isOrderedType(elementType, hasCycle);
case TypeTags.TUPLE:
List<BType> tupleMemberTypes = ((BTupleType) type).tupleTypes;
for (BType memType : tupleMemberTypes) {
if (!isOrderedType(memType, hasCycle)) {
return false;
}
}
BType restType = ((BTupleType) type).restType;
return restType == null || isOrderedType(restType, hasCycle);
case TypeTags.FINITE:
boolean isValueSpaceOrdered = false;
Set<BLangExpression> valSpace = ((BFiniteType) type).getValueSpace();
BType baseExprType = valSpace.iterator().next().type;
for (BLangExpression expr : valSpace) {
if (!checkValueSpaceHasSameType((BFiniteType) type, baseExprType)) {
return false;
}
isValueSpaceOrdered = isOrderedType(expr.type, hasCycle);
if (!isValueSpaceOrdered) {
break;
}
}
return isValueSpaceOrdered;
default:
return isSimpleBasicType(type.tag);
}
}
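// Illustrative example: int, string?, and int[] are ordered types, while map<int> and int|string are
// not; for a union, every member must itself be ordered and all members must share the same basic type,
// with nil also being allowed alongside another basic type.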
private boolean isIntOrStringType(int firstTypeTag, int secondTypeTag) {
return ((TypeTags.isIntegerTypeTag(firstTypeTag)) && (TypeTags.isIntegerTypeTag(secondTypeTag))) ||
((TypeTags.isStringTypeTag(firstTypeTag)) && (TypeTags.isStringTypeTag(secondTypeTag)));
}
public boolean isUnionOfSimpleBasicTypes(BType type) {
if (type.tag == TypeTags.UNION) {
Set<BType> memberTypes = ((BUnionType) type).getMemberTypes();
for (BType memType : memberTypes) {
if (!isSimpleBasicType(memType.tag)) {
return false;
}
}
return true;
}
return isSimpleBasicType(type.tag);
}
public boolean isSubTypeOfReadOnlyOrIsolatedObjectUnion(BType type) {
if (isInherentlyImmutableType(type) || Symbols.isFlagOn(type.flags, Flags.READONLY)) {
return true;
}
int tag = type.tag;
if (tag == TypeTags.OBJECT) {
return isIsolated(type);
}
if (tag != TypeTags.UNION) {
return false;
}
for (BType memberType : ((BUnionType) type).getMemberTypes()) {
if (!isSubTypeOfReadOnlyOrIsolatedObjectUnion(memberType)) {
return false;
}
}
return true;
}
private boolean isIsolated(BType type) {
return Symbols.isFlagOn(type.flags, Flags.ISOLATED);
}
BType getTypeWithoutNil(BType type) {
if (type.tag != TypeTags.UNION) {
return type;
}
BUnionType unionType = (BUnionType) type;
if (!unionType.isNullable()) {
return unionType;
}
List<BType> nonNilTypes = new ArrayList<>();
for (BType memberType : unionType.getMemberTypes()) {
if (!isAssignable(memberType, symTable.nilType)) {
nonNilTypes.add(memberType);
}
}
if (nonNilTypes.size() == 1) {
return nonNilTypes.get(0);
}
return BUnionType.create(null, new LinkedHashSet<>(nonNilTypes));
}
boolean isNeverTypeOrStructureTypeWithARequiredNeverMember(BType type) {
switch (type.tag) {
case TypeTags.NEVER:
return true;
case TypeTags.RECORD:
for (BField field : ((BRecordType) type).fields.values()) {
if (!isSameType(type, field.type) && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED) &&
isNeverTypeOrStructureTypeWithARequiredNeverMember(field.type)) {
return true;
}
}
return false;
case TypeTags.TUPLE:
BTupleType tupleType = (BTupleType) type;
List<BType> tupleTypes = tupleType.tupleTypes;
for (BType mem : tupleTypes) {
if (isNeverTypeOrStructureTypeWithARequiredNeverMember(mem)) {
return true;
}
}
return false;
default:
return false;
}
}
private static class ListenerValidationModel {
private final Types types;
private final SymbolTable symtable;
private final BType serviceNameType;
boolean attachFound;
boolean detachFound;
boolean startFound;
boolean gracefulStopFound;
boolean immediateStopFound;
public ListenerValidationModel(Types types, SymbolTable symTable) {
this.types = types;
this.symtable = symTable;
this.serviceNameType =
BUnionType.create(null, symtable.stringType, symtable.arrayStringType, symtable.nilType);
}
boolean isValidListener() {
return attachFound && detachFound && startFound && gracefulStopFound && immediateStopFound;
}
private boolean checkMethods(List<BAttachedFunction> rhsFuncs) {
for (BAttachedFunction func : rhsFuncs) {
switch (func.funcName.value) {
case "attach":
if (!checkAttachMethod(func)) {
return false;
}
break;
case "detach":
if (!checkDetachMethod(func)) {
return false;
}
break;
case "start":
if (!checkStartMethod(func)) {
return true;
}
break;
case "gracefulStop":
if (!checkGracefulStop(func)) {
return false;
}
break;
case "immediateStop":
if (!checkImmediateStop(func)) {
return false;
}
break;
}
}
return isValidListener();
}
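// Illustrative note: a valid listener must provide all five methods checked above (attach, detach,
// start, gracefulStop and immediateStop), each public and returning a subtype of error?, with the
// expected parameters; isValidListener() succeeds only after every corresponding flag has been set.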
private boolean emptyParamList(BAttachedFunction func) {
return func.type.paramTypes.isEmpty() && func.type.restType != symtable.noType;
}
private boolean publicAndReturnsErrorOrNil(BAttachedFunction func) {
if (!Symbols.isPublic(func.symbol)) {
return false;
}
return types.isAssignable(func.type.retType, symtable.errorOrNilType);
}
private boolean isPublicNoParamReturnsErrorOrNil(BAttachedFunction func) {
if (!publicAndReturnsErrorOrNil(func)) {
return false;
}
return emptyParamList(func);
}
private boolean checkImmediateStop(BAttachedFunction func) {
return immediateStopFound = isPublicNoParamReturnsErrorOrNil(func);
}
private boolean checkGracefulStop(BAttachedFunction func) {
return gracefulStopFound = isPublicNoParamReturnsErrorOrNil(func);
}
private boolean checkStartMethod(BAttachedFunction func) {
return startFound = publicAndReturnsErrorOrNil(func);
}
private boolean checkDetachMethod(BAttachedFunction func) {
if (!publicAndReturnsErrorOrNil(func)) {
return false;
}
if (func.type.paramTypes.size() != 1) {
return false;
}
return detachFound = isServiceObject(func.type.paramTypes.get(0));
}
private boolean checkAttachMethod(BAttachedFunction func) {
if (!publicAndReturnsErrorOrNil(func)) {
return false;
}
if (func.type.paramTypes.size() != 2) {
return false;
}
BType firstParamType = func.type.paramTypes.get(0);
if (!isServiceObject(firstParamType)) {
return false;
}
BType secondParamType = func.type.paramTypes.get(1);
boolean sameType = types.isAssignable(secondParamType, this.serviceNameType);
return attachFound = sameType;
}
private boolean isServiceObject(BType type) {
if (type.tag == TypeTags.UNION) {
for (BType memberType : ((BUnionType) type).getMemberTypes()) {
if (!isServiceObject(memberType)) {
return false;
}
}
return true;
}
if (type.tag != TypeTags.OBJECT) {
return false;
}
return Symbols.isService(type.tsymbol);
}
}
/**
* Intersection type validation helper.
*
* @since 2.0.0
*/
public static class IntersectionContext {
Location lhsPos;
Location rhsPos;
BLangDiagnosticLog dlog;
ContextOption contextOption;
boolean compilerInternalIntersectionTest;
boolean preferNonGenerativeIntersection;
private IntersectionContext(BLangDiagnosticLog diaglog, Location left, Location right) {
this.dlog = diaglog;
this.lhsPos = left;
this.rhsPos = right;
this.contextOption = ContextOption.NON;
this.compilerInternalIntersectionTest = false;
this.preferNonGenerativeIntersection = false;
}
/**
* Create {@link IntersectionContext} used for calculating the intersection type when the user
* explicitly writes an intersection type. This will produce error messages explaining why there is no
* intersection between the two types.
*
* @return a {@link IntersectionContext}
*/
public static IntersectionContext from(BLangDiagnosticLog diaglog, Location left, Location right) {
return new IntersectionContext(diaglog, left, right);
}
/**
* Create {@link IntersectionContext} used for calculating the intersection type to see if there
* is an intersection between the types. This does not emit error messages explaining why there is no
* intersection between the two types, and it does not generate a type definition for the calculated
* intersection type. Do not use this context to create an intersection type whose calculated type is used
* for any purpose other than checking whether an intersection exists.
*
* @return a {@link IntersectionContext}
*/
public static IntersectionContext compilerInternalIntersectionTestContext() {
IntersectionContext diagnosticContext = new IntersectionContext(null, null, null);
diagnosticContext.compilerInternalIntersectionTest = true;
return diagnosticContext;
}
/**
* Create {@link IntersectionContext} used for calculating the intersection type.
* This does not emit error messages explaining why there is no intersection between two types.
*
* @return a {@link IntersectionContext}
*/
public static IntersectionContext compilerInternalIntersectionContext() {
IntersectionContext diagnosticContext = new IntersectionContext(null, null, null);
return diagnosticContext;
}
/**
* Create {@link IntersectionContext} used for calculating the intersection type, which tries not to
* generate new types when possible.
* This is to preserve the previous type-narrowing semantics of intersection calculation.
* This does not emit error messages explaining why there is no intersection between two types.
*
* @return a {@link IntersectionContext}
*/
public static IntersectionContext compilerInternalNonGenerativeIntersectionContext() {
IntersectionContext diagnosticContext = new IntersectionContext(null, null, null);
diagnosticContext.preferNonGenerativeIntersection = true;
return diagnosticContext;
}
public IntersectionContext switchLeft() {
this.contextOption = ContextOption.LEFT;
return this;
}
public IntersectionContext switchRight() {
this.contextOption = ContextOption.RIGHT;
return this;
}
private boolean logError(DiagnosticErrorCode diagnosticCode, Object... args) {
Location pos = null;
if (contextOption == ContextOption.LEFT && lhsPos != null) {
pos = lhsPos;
} else if (contextOption == ContextOption.RIGHT && rhsPos != null) {
pos = rhsPos;
}
if (pos != null) {
dlog.error(pos, diagnosticCode, args);
return true;
}
return false;
}
}
private enum ContextOption {
LEFT, RIGHT, NON;
}
} |
A comment about the `-1 *` here would probably make a lot of sense. | public boolean triggerCheckpoint(long timestamp, boolean isPeriodic) {
try {
triggerCheckpoint(timestamp, checkpointProperties, null, isPeriodic, false);
return true;
} catch (CheckpointException e) {
try {
long latestGeneratedCheckpointId = getCheckpointIdCounter().getAndIncrement();
failureManager.handleCheckpointException(e, -1 * latestGeneratedCheckpointId);
} catch (Exception e1) {
LOG.warn("Get latest generated checkpoint id error : ", e1);
}
return false;
}
} | failureManager.handleCheckpointException(e, -1 * latestGeneratedCheckpointId); | public boolean triggerCheckpoint(long timestamp, boolean isPeriodic) {
try {
triggerCheckpoint(timestamp, checkpointProperties, null, isPeriodic, false);
return true;
} catch (CheckpointException e) {
long latestGeneratedCheckpointId = getCheckpointIdCounter().get();
failureManager.handleCheckpointException(e, -1 * latestGeneratedCheckpointId);
return false;
}
} | class CheckpointCoordinator {
private static final Logger LOG = LoggerFactory.getLogger(CheckpointCoordinator.class);
/** The number of recent checkpoints whose IDs are remembered. */
private static final int NUM_GHOST_CHECKPOINT_IDS = 16;
/** Coordinator-wide lock to safeguard the checkpoint updates. */
private final Object lock = new Object();
/** Lock specially to make sure that trigger requests do not overtake each other.
* This is not done with the coordinator-wide lock, because as part of triggering,
* blocking operations may happen (distributed atomic counters).
* Using a dedicated lock, we avoid blocking the processing of 'acknowledge/decline'
* messages during that phase. */
private final Object triggerLock = new Object();
/** The job whose checkpoint this coordinator coordinates. */
private final JobID job;
/** Default checkpoint properties. **/
private final CheckpointProperties checkpointProperties;
/** The executor used for asynchronous calls, like potentially blocking I/O. */
private final Executor executor;
/** Tasks who need to be sent a message when a checkpoint is started. */
private final ExecutionVertex[] tasksToTrigger;
/** Tasks who need to acknowledge a checkpoint before it succeeds. */
private final ExecutionVertex[] tasksToWaitFor;
/** Tasks who need to be sent a message when a checkpoint is confirmed. */
private final ExecutionVertex[] tasksToCommitTo;
/** Map from checkpoint ID to the pending checkpoint. */
private final Map<Long, PendingCheckpoint> pendingCheckpoints;
/** Completed checkpoints. Implementations can be blocking. Make sure calls to methods
* accessing this don't block the job manager actor and run asynchronously. */
private final CompletedCheckpointStore completedCheckpointStore;
/** The root checkpoint state backend, which is responsible for initializing the
* checkpoint, storing the metadata, and cleaning up the checkpoint. */
private final CheckpointStorageCoordinatorView checkpointStorage;
/** A list of recent checkpoint IDs, to identify late messages (vs invalid ones). */
private final ArrayDeque<Long> recentPendingCheckpoints;
/** Checkpoint ID counter to ensure ascending IDs. In case of job manager failures, these
* need to be ascending across job managers. */
private final CheckpointIDCounter checkpointIdCounter;
/** The base checkpoint interval. Actual trigger time may be affected by the
* max concurrent checkpoints and minimum-pause values */
private final long baseInterval;
/** The max time (in ms) that a checkpoint may take. */
private final long checkpointTimeout;
/** The min time(in ns) to delay after a checkpoint could be triggered. Allows to
* enforce minimum processing time between checkpoint attempts */
private final long minPauseBetweenCheckpointsNanos;
/** The maximum number of checkpoints that may be in progress at the same time. */
private final int maxConcurrentCheckpointAttempts;
/** The timer that handles the checkpoint timeouts and triggers periodic checkpoints. */
private final ScheduledThreadPoolExecutor timer;
/** The master checkpoint hooks executed by this checkpoint coordinator. */
private final HashMap<String, MasterTriggerRestoreHook<?>> masterHooks;
/** Actor that receives status updates from the execution graph this coordinator works for. */
private JobStatusListener jobStatusListener;
/** The number of consecutive failed trigger attempts. */
private final AtomicInteger numUnsuccessfulCheckpointsTriggers = new AtomicInteger(0);
/** A handle to the current periodic trigger, to cancel it when necessary. */
private ScheduledFuture<?> currentPeriodicTrigger;
/** The timestamp (via {@link System
private long lastCheckpointCompletionNanos;
/** Flag whether a triggered checkpoint should immediately schedule the next checkpoint.
* Non-volatile, because only accessed in synchronized scope */
private boolean periodicScheduling;
/** Flag whether a trigger request could not be handled immediately. Non-volatile, because only
* accessed in synchronized scope */
private boolean triggerRequestQueued;
/** Flag marking the coordinator as shut down (not accepting any messages any more). */
private volatile boolean shutdown;
/** Optional tracker for checkpoint statistics. */
@Nullable
private CheckpointStatsTracker statsTracker;
/** A factory for SharedStateRegistry objects. */
private final SharedStateRegistryFactory sharedStateRegistryFactory;
/** Registry that tracks state which is shared across (incremental) checkpoints. */
private SharedStateRegistry sharedStateRegistry;
private final CheckpointFailureManager failureManager;
public CheckpointCoordinator(
JobID job,
long baseInterval,
long checkpointTimeout,
long minPauseBetweenCheckpoints,
int maxConcurrentCheckpointAttempts,
CheckpointRetentionPolicy retentionPolicy,
ExecutionVertex[] tasksToTrigger,
ExecutionVertex[] tasksToWaitFor,
ExecutionVertex[] tasksToCommitTo,
CheckpointIDCounter checkpointIDCounter,
CompletedCheckpointStore completedCheckpointStore,
StateBackend checkpointStateBackend,
Executor executor,
SharedStateRegistryFactory sharedStateRegistryFactory,
CheckpointFailureManager failureManager) {
checkNotNull(checkpointStateBackend);
checkArgument(baseInterval > 0, "Checkpoint base interval must be larger than zero");
checkArgument(checkpointTimeout >= 1, "Checkpoint timeout must be larger than zero");
checkArgument(minPauseBetweenCheckpoints >= 0, "minPauseBetweenCheckpoints must be >= 0");
checkArgument(maxConcurrentCheckpointAttempts >= 1, "maxConcurrentCheckpointAttempts must be >= 1");
if (minPauseBetweenCheckpoints > 365L * 24 * 60 * 60 * 1_000) {
minPauseBetweenCheckpoints = 365L * 24 * 60 * 60 * 1_000;
}
if (baseInterval < minPauseBetweenCheckpoints) {
baseInterval = minPauseBetweenCheckpoints;
}
this.job = checkNotNull(job);
this.baseInterval = baseInterval;
this.checkpointTimeout = checkpointTimeout;
this.minPauseBetweenCheckpointsNanos = minPauseBetweenCheckpoints * 1_000_000;
this.maxConcurrentCheckpointAttempts = maxConcurrentCheckpointAttempts;
this.tasksToTrigger = checkNotNull(tasksToTrigger);
this.tasksToWaitFor = checkNotNull(tasksToWaitFor);
this.tasksToCommitTo = checkNotNull(tasksToCommitTo);
this.pendingCheckpoints = new LinkedHashMap<>();
this.checkpointIdCounter = checkNotNull(checkpointIDCounter);
this.completedCheckpointStore = checkNotNull(completedCheckpointStore);
this.executor = checkNotNull(executor);
this.sharedStateRegistryFactory = checkNotNull(sharedStateRegistryFactory);
this.sharedStateRegistry = sharedStateRegistryFactory.create(executor);
this.failureManager = checkNotNull(failureManager);
this.recentPendingCheckpoints = new ArrayDeque<>(NUM_GHOST_CHECKPOINT_IDS);
this.masterHooks = new HashMap<>();
this.timer = new ScheduledThreadPoolExecutor(1,
new DispatcherThreadFactory(Thread.currentThread().getThreadGroup(), "Checkpoint Timer"));
this.timer.setRemoveOnCancelPolicy(true);
this.timer.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
this.timer.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
this.checkpointProperties = CheckpointProperties.forCheckpoint(retentionPolicy);
try {
this.checkpointStorage = checkpointStateBackend.createCheckpointStorage(job);
} catch (IOException e) {
throw new FlinkRuntimeException("Failed to create checkpoint storage at checkpoint coordinator side.", e);
}
try {
checkpointIDCounter.start();
} catch (Throwable t) {
throw new RuntimeException("Failed to start checkpoint ID counter: " + t.getMessage(), t);
}
}
/**
* Adds the given master hook to the checkpoint coordinator. This method does nothing, if
* the checkpoint coordinator already contained a hook with the same ID (as defined via
* {@link MasterTriggerRestoreHook
*
* @param hook The hook to add.
* @return True, if the hook was added, false if the checkpoint coordinator already
* contained a hook with the same ID.
*/
public boolean addMasterHook(MasterTriggerRestoreHook<?> hook) {
checkNotNull(hook);
final String id = hook.getIdentifier();
checkArgument(!StringUtils.isNullOrWhitespaceOnly(id), "The hook has a null or empty id");
synchronized (lock) {
if (!masterHooks.containsKey(id)) {
masterHooks.put(id, hook);
return true;
}
else {
return false;
}
}
}
/**
* Gets the number of currently registered master hooks.
*/
public int getNumberOfRegisteredMasterHooks() {
synchronized (lock) {
return masterHooks.size();
}
}
/**
* Sets the checkpoint stats tracker.
*
* @param statsTracker The checkpoint stats tracker.
*/
public void setCheckpointStatsTracker(@Nullable CheckpointStatsTracker statsTracker) {
this.statsTracker = statsTracker;
}
/**
* Shuts down the checkpoint coordinator.
*
* <p>After this method has been called, the coordinator does not accept
* any further messages and cannot trigger any further checkpoints.
*/
public void shutdown(JobStatus jobStatus) throws Exception {
synchronized (lock) {
if (!shutdown) {
shutdown = true;
LOG.info("Stopping checkpoint coordinator for job {}.", job);
periodicScheduling = false;
triggerRequestQueued = false;
MasterHooks.close(masterHooks.values(), LOG);
masterHooks.clear();
timer.shutdownNow();
for (PendingCheckpoint pending : pendingCheckpoints.values()) {
failPendingCheckpoint(pending, CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN);
}
pendingCheckpoints.clear();
completedCheckpointStore.shutdown(jobStatus);
checkpointIdCounter.shutdown(jobStatus);
}
}
}
public boolean isShutdown() {
return shutdown;
}
/**
* Triggers a savepoint with the given savepoint directory as a target.
*
* @param timestamp The timestamp for the savepoint.
* @param targetLocation Target location for the savepoint, optional. If null, the
* state backend's configured default will be used.
* @return A future to the completed checkpoint
* @throws IllegalStateException If no savepoint directory has been
* specified and no default savepoint directory has been
* configured
*/
public CompletableFuture<CompletedCheckpoint> triggerSavepoint(
final long timestamp,
@Nullable final String targetLocation) {
final CheckpointProperties properties = CheckpointProperties.forSavepoint();
return triggerSavepointInternal(timestamp, properties, false, targetLocation);
}
/**
* Triggers a synchronous savepoint with the given savepoint directory as a target.
*
* @param timestamp The timestamp for the savepoint.
* @param advanceToEndOfEventTime Flag indicating if the source should inject a {@code MAX_WATERMARK} in the pipeline
* to fire any registered event-time timers.
* @param targetLocation Target location for the savepoint, optional. If null, the
* state backend's configured default will be used.
* @return A future to the completed checkpoint
* @throws IllegalStateException If no savepoint directory has been
* specified and no default savepoint directory has been
* configured
*/
public CompletableFuture<CompletedCheckpoint> triggerSynchronousSavepoint(
final long timestamp,
final boolean advanceToEndOfEventTime,
@Nullable final String targetLocation) {
final CheckpointProperties properties = CheckpointProperties.forSyncSavepoint();
return triggerSavepointInternal(timestamp, properties, advanceToEndOfEventTime, targetLocation);
}
private CompletableFuture<CompletedCheckpoint> triggerSavepointInternal(
final long timestamp,
final CheckpointProperties checkpointProperties,
final boolean advanceToEndOfEventTime,
@Nullable final String targetLocation) {
checkNotNull(checkpointProperties);
try {
PendingCheckpoint pendingCheckpoint = triggerCheckpoint(
timestamp,
checkpointProperties,
targetLocation,
false,
advanceToEndOfEventTime);
return pendingCheckpoint.getCompletionFuture();
} catch (CheckpointException e) {
Throwable cause = new CheckpointException("Failed to trigger savepoint.", e.getCheckpointFailureReason());
return FutureUtils.completedExceptionally(cause);
}
}
/**
* Triggers a new standard checkpoint and uses the given timestamp as the checkpoint
* timestamp.
*
* @param timestamp The timestamp for the checkpoint.
* @param isPeriodic Flag indicating whether this triggered checkpoint is
* periodic. If this flag is true, but the periodic scheduler is disabled,
* the checkpoint will be declined.
* @return <code>true</code> if triggering the checkpoint succeeded.
*/
@VisibleForTesting
public PendingCheckpoint triggerCheckpoint(
long timestamp,
CheckpointProperties props,
@Nullable String externalSavepointLocation,
boolean isPeriodic,
boolean advanceToEndOfTime) throws CheckpointException {
if (advanceToEndOfTime && !(props.isSynchronous() && props.isSavepoint())) {
throw new IllegalArgumentException("Only synchronous savepoints are allowed to advance the watermark to MAX.");
}
synchronized (lock) {
if (shutdown) {
throw new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN);
}
if (isPeriodic && !periodicScheduling) {
throw new CheckpointException(CheckpointFailureReason.PERIODIC_SCHEDULER_SHUTDOWN);
}
if (!props.forceCheckpoint()) {
if (triggerRequestQueued) {
LOG.warn("Trying to trigger another checkpoint for job {} while one was queued already.", job);
throw new CheckpointException(CheckpointFailureReason.ALREADY_QUEUED);
}
if (pendingCheckpoints.size() >= maxConcurrentCheckpointAttempts) {
triggerRequestQueued = true;
if (currentPeriodicTrigger != null) {
currentPeriodicTrigger.cancel(false);
currentPeriodicTrigger = null;
}
throw new CheckpointException(CheckpointFailureReason.TOO_MANY_CONCURRENT_CHECKPOINTS);
}
final long earliestNext = lastCheckpointCompletionNanos + minPauseBetweenCheckpointsNanos;
final long durationTillNextMillis = (earliestNext - System.nanoTime()) / 1_000_000;
if (durationTillNextMillis > 0) {
if (currentPeriodicTrigger != null) {
currentPeriodicTrigger.cancel(false);
currentPeriodicTrigger = null;
}
currentPeriodicTrigger = timer.scheduleAtFixedRate(
new ScheduledTrigger(),
durationTillNextMillis, baseInterval, TimeUnit.MILLISECONDS);
throw new CheckpointException(CheckpointFailureReason.MINIMUM_TIME_BETWEEN_CHECKPOINTS);
}
}
}
Execution[] executions = new Execution[tasksToTrigger.length];
for (int i = 0; i < tasksToTrigger.length; i++) {
Execution ee = tasksToTrigger[i].getCurrentExecutionAttempt();
if (ee == null) {
LOG.info("Checkpoint triggering task {} of job {} is not being executed at the moment. Aborting checkpoint.",
tasksToTrigger[i].getTaskNameWithSubtaskIndex(),
job);
throw new CheckpointException(CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING);
} else if (ee.getState() == ExecutionState.RUNNING) {
executions[i] = ee;
} else {
LOG.info("Checkpoint triggering task {} of job {} is not in state {} but {} instead. Aborting checkpoint.",
tasksToTrigger[i].getTaskNameWithSubtaskIndex(),
job,
ExecutionState.RUNNING,
ee.getState());
throw new CheckpointException(CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING);
}
}
Map<ExecutionAttemptID, ExecutionVertex> ackTasks = new HashMap<>(tasksToWaitFor.length);
for (ExecutionVertex ev : tasksToWaitFor) {
Execution ee = ev.getCurrentExecutionAttempt();
if (ee != null) {
ackTasks.put(ee.getAttemptId(), ev);
} else {
LOG.info("Checkpoint acknowledging task {} of job {} is not being executed at the moment. Aborting checkpoint.",
ev.getTaskNameWithSubtaskIndex(),
job);
throw new CheckpointException(CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING);
}
}
synchronized (triggerLock) {
final CheckpointStorageLocation checkpointStorageLocation;
final long checkpointID;
try {
checkpointID = checkpointIdCounter.getAndIncrement();
checkpointStorageLocation = props.isSavepoint() ?
checkpointStorage.initializeLocationForSavepoint(checkpointID, externalSavepointLocation) :
checkpointStorage.initializeLocationForCheckpoint(checkpointID);
}
catch (Throwable t) {
int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet();
LOG.warn("Failed to trigger checkpoint for job {} ({} consecutive failed attempts so far).",
job,
numUnsuccessful,
t);
throw new CheckpointException(CheckpointFailureReason.EXCEPTION, t);
}
final PendingCheckpoint checkpoint = new PendingCheckpoint(
job,
checkpointID,
timestamp,
ackTasks,
props,
checkpointStorageLocation,
executor);
if (statsTracker != null) {
PendingCheckpointStats callback = statsTracker.reportPendingCheckpoint(
checkpointID,
timestamp,
props);
checkpoint.setStatsCallback(callback);
}
final Runnable canceller = () -> {
synchronized (lock) {
if (!checkpoint.isDiscarded()) {
LOG.info("Checkpoint {} of job {} expired before completing.", checkpointID, job);
failPendingCheckpoint(checkpoint, CheckpointFailureReason.CHECKPOINT_EXPIRED);
pendingCheckpoints.remove(checkpointID);
rememberRecentCheckpointId(checkpointID);
triggerQueuedRequests();
}
}
};
try {
synchronized (lock) {
if (shutdown) {
throw new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN);
}
else if (!props.forceCheckpoint()) {
if (triggerRequestQueued) {
LOG.warn("Trying to trigger another checkpoint for job {} while one was queued already.", job);
throw new CheckpointException(CheckpointFailureReason.ALREADY_QUEUED);
}
if (pendingCheckpoints.size() >= maxConcurrentCheckpointAttempts) {
triggerRequestQueued = true;
if (currentPeriodicTrigger != null) {
currentPeriodicTrigger.cancel(false);
currentPeriodicTrigger = null;
}
throw new CheckpointException(CheckpointFailureReason.TOO_MANY_CONCURRENT_CHECKPOINTS);
}
final long earliestNext = lastCheckpointCompletionNanos + minPauseBetweenCheckpointsNanos;
final long durationTillNextMillis = (earliestNext - System.nanoTime()) / 1_000_000;
if (durationTillNextMillis > 0) {
if (currentPeriodicTrigger != null) {
currentPeriodicTrigger.cancel(false);
currentPeriodicTrigger = null;
}
currentPeriodicTrigger = timer.scheduleAtFixedRate(
new ScheduledTrigger(),
durationTillNextMillis, baseInterval, TimeUnit.MILLISECONDS);
throw new CheckpointException(CheckpointFailureReason.MINIMUM_TIME_BETWEEN_CHECKPOINTS);
}
}
LOG.info("Triggering checkpoint {} @ {} for job {}.", checkpointID, timestamp, job);
pendingCheckpoints.put(checkpointID, checkpoint);
ScheduledFuture<?> cancellerHandle = timer.schedule(
canceller,
checkpointTimeout, TimeUnit.MILLISECONDS);
if (!checkpoint.setCancellerHandle(cancellerHandle)) {
cancellerHandle.cancel(false);
}
final List<MasterState> masterStates = MasterHooks.triggerMasterHooks(masterHooks.values(),
checkpointID, timestamp, executor, Time.milliseconds(checkpointTimeout));
for (MasterState s : masterStates) {
checkpoint.addMasterState(s);
}
}
final CheckpointOptions checkpointOptions = new CheckpointOptions(
props.getCheckpointType(),
checkpointStorageLocation.getLocationReference());
for (Execution execution: executions) {
if (props.isSynchronous()) {
execution.triggerSynchronousSavepoint(checkpointID, timestamp, checkpointOptions, advanceToEndOfTime);
} else {
execution.triggerCheckpoint(checkpointID, timestamp, checkpointOptions);
}
}
numUnsuccessfulCheckpointsTriggers.set(0);
return checkpoint;
}
catch (Throwable t) {
synchronized (lock) {
pendingCheckpoints.remove(checkpointID);
}
int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet();
LOG.warn("Failed to trigger checkpoint {} for job {}. ({} consecutive failed attempts so far)",
checkpointID, job, numUnsuccessful, t);
if (!checkpoint.isDiscarded()) {
failPendingCheckpoint(checkpoint, CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, t);
}
try {
checkpointStorageLocation.disposeOnFailure();
}
catch (Throwable t2) {
LOG.warn("Cannot dispose failed checkpoint storage location {}", checkpointStorageLocation, t2);
}
throw new CheckpointException(CheckpointFailureReason.EXCEPTION, t);
}
}
}
/**
* Receives a {@link DeclineCheckpoint} message for a pending checkpoint.
*
* @param message Checkpoint decline from the task manager
*/
public void receiveDeclineMessage(DeclineCheckpoint message) {
if (shutdown || message == null) {
return;
}
if (!job.equals(message.getJob())) {
throw new IllegalArgumentException("Received DeclineCheckpoint message for job " +
message.getJob() + " while this coordinator handles job " + job);
}
final long checkpointId = message.getCheckpointId();
final String reason = (message.getReason() != null ? message.getReason().getMessage() : "");
PendingCheckpoint checkpoint;
synchronized (lock) {
if (shutdown) {
return;
}
checkpoint = pendingCheckpoints.remove(checkpointId);
if (checkpoint != null && !checkpoint.isDiscarded()) {
LOG.info("Decline checkpoint {} by task {} of job {}.", checkpointId, message.getTaskExecutionId(), job);
discardCheckpoint(checkpoint, message.getReason());
}
else if (checkpoint != null) {
throw new IllegalStateException(
"Received message for discarded but non-removed checkpoint " + checkpointId);
}
else if (LOG.isDebugEnabled()) {
if (recentPendingCheckpoints.contains(checkpointId)) {
LOG.debug("Received another decline message for now expired checkpoint attempt {} of job {} : {}",
checkpointId, job, reason);
} else {
LOG.debug("Received decline message for unknown (too old?) checkpoint attempt {} of job {} : {}",
checkpointId, job, reason);
}
}
}
}
/**
* Receives an AcknowledgeCheckpoint message and returns whether the
* message was associated with a pending checkpoint.
*
* @param message Checkpoint ack from the task manager
*
* @return Flag indicating whether the ack'd checkpoint was associated
* with a pending checkpoint.
*
* @throws CheckpointException If the checkpoint cannot be added to the completed checkpoint store.
*/
public boolean receiveAcknowledgeMessage(AcknowledgeCheckpoint message) throws CheckpointException {
if (shutdown || message == null) {
return false;
}
if (!job.equals(message.getJob())) {
LOG.error("Received wrong AcknowledgeCheckpoint message for job {}: {}", job, message);
return false;
}
final long checkpointId = message.getCheckpointId();
synchronized (lock) {
if (shutdown) {
return false;
}
final PendingCheckpoint checkpoint = pendingCheckpoints.get(checkpointId);
if (checkpoint != null && !checkpoint.isDiscarded()) {
switch (checkpoint.acknowledgeTask(message.getTaskExecutionId(), message.getSubtaskState(), message.getCheckpointMetrics())) {
case SUCCESS:
LOG.debug("Received acknowledge message for checkpoint {} from task {} of job {}.",
checkpointId, message.getTaskExecutionId(), message.getJob());
if (checkpoint.isFullyAcknowledged()) {
completePendingCheckpoint(checkpoint);
}
break;
case DUPLICATE:
LOG.debug("Received a duplicate acknowledge message for checkpoint {}, task {}, job {}.",
message.getCheckpointId(), message.getTaskExecutionId(), message.getJob());
break;
case UNKNOWN:
LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {}, " +
"because the task's execution attempt id was unknown. Discarding " +
"the state handle to avoid lingering state.", message.getCheckpointId(),
message.getTaskExecutionId(), message.getJob());
discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState());
break;
case DISCARDED:
LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {}, " +
"because the pending checkpoint had been discarded. Discarding the " +
"state handle tp avoid lingering state.",
message.getCheckpointId(), message.getTaskExecutionId(), message.getJob());
discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState());
}
return true;
}
else if (checkpoint != null) {
throw new IllegalStateException(
"Received message for discarded but non-removed checkpoint " + checkpointId);
}
else {
boolean wasPendingCheckpoint;
if (recentPendingCheckpoints.contains(checkpointId)) {
wasPendingCheckpoint = true;
LOG.warn("Received late message for now expired checkpoint attempt {} from " +
"{} of job {}.", checkpointId, message.getTaskExecutionId(), message.getJob());
}
else {
LOG.debug("Received message for an unknown checkpoint {} from {} of job {}.",
checkpointId, message.getTaskExecutionId(), message.getJob());
wasPendingCheckpoint = false;
}
discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState());
return wasPendingCheckpoint;
}
}
}
/**
* Try to complete the given pending checkpoint.
*
* <p>Important: This method should only be called in the checkpoint lock scope.
*
* @param pendingCheckpoint to complete
* @throws CheckpointException if the completion failed
*/
private void completePendingCheckpoint(PendingCheckpoint pendingCheckpoint) throws CheckpointException {
final long checkpointId = pendingCheckpoint.getCheckpointId();
final CompletedCheckpoint completedCheckpoint;
Map<OperatorID, OperatorState> operatorStates = pendingCheckpoint.getOperatorStates();
sharedStateRegistry.registerAll(operatorStates.values());
try {
try {
completedCheckpoint = pendingCheckpoint.finalizeCheckpoint();
failureManager.handleCheckpointSuccess(pendingCheckpoint.getCheckpointId());
}
catch (Exception e1) {
if (!pendingCheckpoint.isDiscarded()) {
failPendingCheckpoint(pendingCheckpoint, CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1);
}
throw new CheckpointException("Could not finalize the pending checkpoint " + checkpointId + '.',
CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1);
}
Preconditions.checkState(pendingCheckpoint.isDiscarded() && completedCheckpoint != null);
try {
completedCheckpointStore.addCheckpoint(completedCheckpoint);
} catch (Exception exception) {
executor.execute(new Runnable() {
@Override
public void run() {
try {
completedCheckpoint.discardOnFailedStoring();
} catch (Throwable t) {
LOG.warn("Could not properly discard completed checkpoint {}.", completedCheckpoint.getCheckpointID(), t);
}
}
});
throw new CheckpointException("Could not complete the pending checkpoint " + checkpointId + '.',
CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, exception);
}
} finally {
pendingCheckpoints.remove(checkpointId);
triggerQueuedRequests();
}
rememberRecentCheckpointId(checkpointId);
dropSubsumedCheckpoints(checkpointId);
lastCheckpointCompletionNanos = System.nanoTime();
LOG.info("Completed checkpoint {} for job {} ({} bytes in {} ms).", checkpointId, job,
completedCheckpoint.getStateSize(), completedCheckpoint.getDuration());
if (LOG.isDebugEnabled()) {
StringBuilder builder = new StringBuilder();
builder.append("Checkpoint state: ");
for (OperatorState state : completedCheckpoint.getOperatorStates().values()) {
builder.append(state);
builder.append(", ");
}
builder.setLength(builder.length() - 2);
LOG.debug(builder.toString());
}
final long timestamp = completedCheckpoint.getTimestamp();
for (ExecutionVertex ev : tasksToCommitTo) {
Execution ee = ev.getCurrentExecutionAttempt();
if (ee != null) {
ee.notifyCheckpointComplete(checkpointId, timestamp);
}
}
}
/**
* Fails all pending checkpoints which have not been acknowledged by the given execution
* attempt id.
*
* @param executionAttemptId for which to discard unacknowledged pending checkpoints
* @param cause of the failure
*/
public void failUnacknowledgedPendingCheckpointsFor(ExecutionAttemptID executionAttemptId, Throwable cause) {
synchronized (lock) {
Iterator<PendingCheckpoint> pendingCheckpointIterator = pendingCheckpoints.values().iterator();
while (pendingCheckpointIterator.hasNext()) {
final PendingCheckpoint pendingCheckpoint = pendingCheckpointIterator.next();
if (!pendingCheckpoint.isAcknowledgedBy(executionAttemptId)) {
pendingCheckpointIterator.remove();
discardCheckpoint(pendingCheckpoint, cause);
}
}
}
}
private void rememberRecentCheckpointId(long id) {
if (recentPendingCheckpoints.size() >= NUM_GHOST_CHECKPOINT_IDS) {
recentPendingCheckpoints.removeFirst();
}
recentPendingCheckpoints.addLast(id);
}
private void dropSubsumedCheckpoints(long checkpointId) {
Iterator<Map.Entry<Long, PendingCheckpoint>> entries = pendingCheckpoints.entrySet().iterator();
while (entries.hasNext()) {
PendingCheckpoint p = entries.next().getValue();
if (p.getCheckpointId() < checkpointId && p.canBeSubsumed()) {
rememberRecentCheckpointId(p.getCheckpointId());
failPendingCheckpoint(p, CheckpointFailureReason.CHECKPOINT_SUBSUMED);
entries.remove();
}
}
}
/**
* Triggers the queued request, if there is one.
*
* <p>NOTE: The caller of this method must hold the lock when invoking the method!
*/
private void triggerQueuedRequests() {
if (triggerRequestQueued) {
triggerRequestQueued = false;
if (periodicScheduling) {
if (currentPeriodicTrigger != null) {
currentPeriodicTrigger.cancel(false);
}
currentPeriodicTrigger = timer.scheduleAtFixedRate(
new ScheduledTrigger(),
0L, baseInterval, TimeUnit.MILLISECONDS);
}
else {
timer.execute(new ScheduledTrigger());
}
}
}
@VisibleForTesting
int getNumScheduledTasks() {
return timer.getQueue().size();
}
/**
* Restores the latest checkpointed state.
*
* @param tasks Map of job vertices to restore. State for these vertices is
* restored via {@link Execution
* @param errorIfNoCheckpoint Fail if no completed checkpoint is available to
* restore from.
* @param allowNonRestoredState Allow checkpoint state that cannot be mapped
* to any job vertex in tasks.
* @return <code>true</code> if state was restored, <code>false</code> otherwise.
* @throws IllegalStateException If the CheckpointCoordinator is shut down.
* @throws IllegalStateException If no completed checkpoint is available and
* the <code>failIfNoCheckpoint</code> flag has been set.
* @throws IllegalStateException If the checkpoint contains state that cannot be
* mapped to any job vertex in <code>tasks</code> and the
* <code>allowNonRestoredState</code> flag has not been set.
* @throws IllegalStateException If the max parallelism changed for an operator
* that restores state from this checkpoint.
* @throws IllegalStateException If the parallelism changed for an operator
* that restores <i>non-partitioned</i> state from this
* checkpoint.
*/
public boolean restoreLatestCheckpointedState(
Map<JobVertexID, ExecutionJobVertex> tasks,
boolean errorIfNoCheckpoint,
boolean allowNonRestoredState) throws Exception {
synchronized (lock) {
if (shutdown) {
throw new IllegalStateException("CheckpointCoordinator is shut down");
}
sharedStateRegistry.close();
sharedStateRegistry = sharedStateRegistryFactory.create(executor);
completedCheckpointStore.recover();
for (CompletedCheckpoint completedCheckpoint : completedCheckpointStore.getAllCheckpoints()) {
completedCheckpoint.registerSharedStatesAfterRestored(sharedStateRegistry);
}
LOG.debug("Status of the shared state registry of job {} after restore: {}.", job, sharedStateRegistry);
CompletedCheckpoint latest = completedCheckpointStore.getLatestCheckpoint();
if (latest == null) {
if (errorIfNoCheckpoint) {
throw new IllegalStateException("No completed checkpoint available");
} else {
LOG.debug("Resetting the master hooks.");
MasterHooks.reset(masterHooks.values(), LOG);
return false;
}
}
LOG.info("Restoring job {} from latest valid checkpoint: {}.", job, latest);
final Map<OperatorID, OperatorState> operatorStates = latest.getOperatorStates();
StateAssignmentOperation stateAssignmentOperation =
new StateAssignmentOperation(latest.getCheckpointID(), tasks, operatorStates, allowNonRestoredState);
stateAssignmentOperation.assignStates();
MasterHooks.restoreMasterHooks(
masterHooks,
latest.getMasterHookStates(),
latest.getCheckpointID(),
allowNonRestoredState,
LOG);
if (statsTracker != null) {
long restoreTimestamp = System.currentTimeMillis();
RestoredCheckpointStats restored = new RestoredCheckpointStats(
latest.getCheckpointID(),
latest.getProperties(),
restoreTimestamp,
latest.getExternalPointer());
statsTracker.reportRestoredCheckpoint(restored);
}
return true;
}
}
/**
* Restore the state with given savepoint.
*
* @param savepointPointer The pointer to the savepoint.
* @param allowNonRestored True if allowing checkpoint state that cannot be
* mapped to any job vertex in tasks.
* @param tasks Map of job vertices to restore. State for these
* vertices is restored via
* {@link Execution
* @param userClassLoader The class loader to resolve serialized classes in
* legacy savepoint versions.
*/
public boolean restoreSavepoint(
String savepointPointer,
boolean allowNonRestored,
Map<JobVertexID, ExecutionJobVertex> tasks,
ClassLoader userClassLoader) throws Exception {
Preconditions.checkNotNull(savepointPointer, "The savepoint path cannot be null.");
LOG.info("Starting job {} from savepoint {} ({})",
job, savepointPointer, (allowNonRestored ? "allowing non restored state" : ""));
final CompletedCheckpointStorageLocation checkpointLocation = checkpointStorage.resolveCheckpoint(savepointPointer);
CompletedCheckpoint savepoint = Checkpoints.loadAndValidateCheckpoint(
job, tasks, checkpointLocation, userClassLoader, allowNonRestored);
completedCheckpointStore.addCheckpoint(savepoint);
long nextCheckpointId = savepoint.getCheckpointID() + 1;
checkpointIdCounter.setCount(nextCheckpointId);
LOG.info("Reset the checkpoint ID of job {} to {}.", job, nextCheckpointId);
return restoreLatestCheckpointedState(tasks, true, allowNonRestored);
}
public int getNumberOfPendingCheckpoints() {
return this.pendingCheckpoints.size();
}
public int getNumberOfRetainedSuccessfulCheckpoints() {
synchronized (lock) {
return completedCheckpointStore.getNumberOfRetainedCheckpoints();
}
}
public Map<Long, PendingCheckpoint> getPendingCheckpoints() {
synchronized (lock) {
return new HashMap<>(this.pendingCheckpoints);
}
}
public List<CompletedCheckpoint> getSuccessfulCheckpoints() throws Exception {
synchronized (lock) {
return completedCheckpointStore.getAllCheckpoints();
}
}
public CheckpointStorageCoordinatorView getCheckpointStorage() {
return checkpointStorage;
}
public CompletedCheckpointStore getCheckpointStore() {
return completedCheckpointStore;
}
public CheckpointIDCounter getCheckpointIdCounter() {
return checkpointIdCounter;
}
public long getCheckpointTimeout() {
return checkpointTimeout;
}
/**
* Returns whether periodic checkpointing has been configured.
*
* @return <code>true</code> if periodic checkpoints have been configured.
*/
public boolean isPeriodicCheckpointingConfigured() {
return baseInterval != Long.MAX_VALUE;
}
public void startCheckpointScheduler() {
synchronized (lock) {
if (shutdown) {
throw new IllegalArgumentException("Checkpoint coordinator is shut down");
}
stopCheckpointScheduler();
periodicScheduling = true;
long initialDelay = ThreadLocalRandom.current().nextLong(
minPauseBetweenCheckpointsNanos / 1_000_000L, baseInterval + 1L);
currentPeriodicTrigger = timer.scheduleAtFixedRate(
new ScheduledTrigger(), initialDelay, baseInterval, TimeUnit.MILLISECONDS);
}
}
public void stopCheckpointScheduler() {
synchronized (lock) {
triggerRequestQueued = false;
periodicScheduling = false;
if (currentPeriodicTrigger != null) {
currentPeriodicTrigger.cancel(false);
currentPeriodicTrigger = null;
}
abortPendingCheckpoints(new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SUSPEND));
numUnsuccessfulCheckpointsTriggers.set(0);
}
}
/**
* Aborts all the pending checkpoints due to an exception.
* @param exception The exception.
*/
public void abortPendingCheckpoints(CheckpointException exception) {
synchronized (lock) {
for (PendingCheckpoint p : pendingCheckpoints.values()) {
failPendingCheckpoint(p, exception.getCheckpointFailureReason());
}
pendingCheckpoints.clear();
}
}
public JobStatusListener createActivatorDeactivator() {
synchronized (lock) {
if (shutdown) {
throw new IllegalArgumentException("Checkpoint coordinator is shut down");
}
if (jobStatusListener == null) {
jobStatusListener = new CheckpointCoordinatorDeActivator(this);
}
return jobStatusListener;
}
}
private final class ScheduledTrigger implements Runnable {
@Override
public void run() {
try {
triggerCheckpoint(System.currentTimeMillis(), true);
}
catch (Exception e) {
LOG.error("Exception while triggering checkpoint for job {}.", job, e);
}
}
}
/**
* Discards the given pending checkpoint because of the given cause.
*
* @param pendingCheckpoint to discard
* @param cause for discarding the checkpoint
*/
private void discardCheckpoint(PendingCheckpoint pendingCheckpoint, @Nullable Throwable cause) {
assert(Thread.holdsLock(lock));
Preconditions.checkNotNull(pendingCheckpoint);
final long checkpointId = pendingCheckpoint.getCheckpointId();
LOG.info("Discarding checkpoint {} of job {}.", checkpointId, job, cause);
if (cause == null || cause instanceof CheckpointDeclineException) {
failPendingCheckpoint(pendingCheckpoint, CheckpointFailureReason.CHECKPOINT_DECLINED, cause);
} else {
failPendingCheckpoint(pendingCheckpoint, CheckpointFailureReason.JOB_FAILURE, cause);
}
rememberRecentCheckpointId(checkpointId);
boolean haveMoreRecentPending = false;
for (PendingCheckpoint p : pendingCheckpoints.values()) {
if (!p.isDiscarded() && p.getCheckpointId() >= pendingCheckpoint.getCheckpointId()) {
haveMoreRecentPending = true;
break;
}
}
if (!haveMoreRecentPending) {
triggerQueuedRequests();
}
}
/**
* Discards the given state object asynchronously belonging to the given job, execution attempt
* id and checkpoint id.
*
* @param jobId identifying the job to which the state object belongs
* @param executionAttemptID identifying the task to which the state object belongs
* @param checkpointId of the state object
* @param subtaskState to discard asynchronously
*/
private void discardSubtaskState(
final JobID jobId,
final ExecutionAttemptID executionAttemptID,
final long checkpointId,
final TaskStateSnapshot subtaskState) {
if (subtaskState != null) {
executor.execute(new Runnable() {
@Override
public void run() {
try {
subtaskState.discardState();
} catch (Throwable t2) {
LOG.warn("Could not properly discard state object of checkpoint {} " +
"belonging to task {} of job {}.", checkpointId, executionAttemptID, jobId, t2);
}
}
});
}
}
private void failPendingCheckpoint(
final PendingCheckpoint pendingCheckpoint,
final CheckpointFailureReason reason,
final Throwable cause) {
CheckpointException exception = new CheckpointException(reason, cause);
if (cause != null) {
pendingCheckpoint.abort(reason, cause);
} else {
pendingCheckpoint.abort(reason);
}
failureManager.handleCheckpointException(exception, pendingCheckpoint.getCheckpointId());
}
private void failPendingCheckpoint(
final PendingCheckpoint pendingCheckpoint,
final CheckpointFailureReason reason) {
failPendingCheckpoint(pendingCheckpoint, reason, null);
}
} | class CheckpointCoordinator {
private static final Logger LOG = LoggerFactory.getLogger(CheckpointCoordinator.class);
/** The number of recent checkpoints whose IDs are remembered. */
private static final int NUM_GHOST_CHECKPOINT_IDS = 16;
/** Coordinator-wide lock to safeguard the checkpoint updates. */
private final Object lock = new Object();
/** Lock specially to make sure that trigger requests do not overtake each other.
* This is not done with the coordinator-wide lock, because as part of triggering,
* blocking operations may happen (distributed atomic counters).
* Using a dedicated lock, we avoid blocking the processing of 'acknowledge/decline'
* messages during that phase. */
private final Object triggerLock = new Object();
/** The job whose checkpoint this coordinator coordinates. */
private final JobID job;
/** Default checkpoint properties. **/
private final CheckpointProperties checkpointProperties;
/** The executor used for asynchronous calls, like potentially blocking I/O. */
private final Executor executor;
/** Tasks who need to be sent a message when a checkpoint is started. */
private final ExecutionVertex[] tasksToTrigger;
/** Tasks who need to acknowledge a checkpoint before it succeeds. */
private final ExecutionVertex[] tasksToWaitFor;
/** Tasks who need to be sent a message when a checkpoint is confirmed. */
private final ExecutionVertex[] tasksToCommitTo;
/** Map from checkpoint ID to the pending checkpoint. */
private final Map<Long, PendingCheckpoint> pendingCheckpoints;
/** Completed checkpoints. Implementations can be blocking. Make sure calls to methods
* accessing this don't block the job manager actor and run asynchronously. */
private final CompletedCheckpointStore completedCheckpointStore;
/** The root checkpoint state backend, which is responsible for initializing the
* checkpoint, storing the metadata, and cleaning up the checkpoint. */
private final CheckpointStorageCoordinatorView checkpointStorage;
/** A list of recent checkpoint IDs, to identify late messages (vs invalid ones). */
private final ArrayDeque<Long> recentPendingCheckpoints;
/** Checkpoint ID counter to ensure ascending IDs. In case of job manager failures, these
* need to be ascending across job managers. */
private final CheckpointIDCounter checkpointIdCounter;
/** The base checkpoint interval. Actual trigger time may be affected by the
* max concurrent checkpoints and minimum-pause values */
private final long baseInterval;
/** The max time (in ms) that a checkpoint may take. */
private final long checkpointTimeout;
/** The min time(in ns) to delay after a checkpoint could be triggered. Allows to
* enforce minimum processing time between checkpoint attempts */
private final long minPauseBetweenCheckpointsNanos;
/** The maximum number of checkpoints that may be in progress at the same time. */
private final int maxConcurrentCheckpointAttempts;
/** The timer that handles the checkpoint timeouts and triggers periodic checkpoints. */
private final ScheduledThreadPoolExecutor timer;
/** The master checkpoint hooks executed by this checkpoint coordinator. */
private final HashMap<String, MasterTriggerRestoreHook<?>> masterHooks;
/** Actor that receives status updates from the execution graph this coordinator works for. */
private JobStatusListener jobStatusListener;
/** The number of consecutive failed trigger attempts. */
private final AtomicInteger numUnsuccessfulCheckpointsTriggers = new AtomicInteger(0);
/** A handle to the current periodic trigger, to cancel it when necessary. */
private ScheduledFuture<?> currentPeriodicTrigger;
/** The timestamp (via {@link System
private long lastCheckpointCompletionNanos;
/** Flag whether a triggered checkpoint should immediately schedule the next checkpoint.
* Non-volatile, because only accessed in synchronized scope */
private boolean periodicScheduling;
/** Flag whether a trigger request could not be handled immediately. Non-volatile, because only
* accessed in synchronized scope */
private boolean triggerRequestQueued;
/** Flag marking the coordinator as shut down (not accepting any messages any more). */
private volatile boolean shutdown;
/** Optional tracker for checkpoint statistics. */
@Nullable
private CheckpointStatsTracker statsTracker;
/** A factory for SharedStateRegistry objects. */
private final SharedStateRegistryFactory sharedStateRegistryFactory;
/** Registry that tracks state which is shared across (incremental) checkpoints. */
private SharedStateRegistry sharedStateRegistry;
private boolean isPreferCheckpointForRecovery;
private final CheckpointFailureManager failureManager;
public CheckpointCoordinator(
JobID job,
CheckpointCoordinatorConfiguration chkConfig,
ExecutionVertex[] tasksToTrigger,
ExecutionVertex[] tasksToWaitFor,
ExecutionVertex[] tasksToCommitTo,
CheckpointIDCounter checkpointIDCounter,
CompletedCheckpointStore completedCheckpointStore,
StateBackend checkpointStateBackend,
Executor executor,
SharedStateRegistryFactory sharedStateRegistryFactory,
CheckpointFailureManager failureManager) {
checkNotNull(checkpointStateBackend);
long minPauseBetweenCheckpoints = chkConfig.getMinPauseBetweenCheckpoints();
if (minPauseBetweenCheckpoints > 365L * 24 * 60 * 60 * 1_000) {
minPauseBetweenCheckpoints = 365L * 24 * 60 * 60 * 1_000;
}
long baseInterval = chkConfig.getCheckpointInterval();
if (baseInterval < minPauseBetweenCheckpoints) {
baseInterval = minPauseBetweenCheckpoints;
}
this.job = checkNotNull(job);
this.baseInterval = baseInterval;
this.checkpointTimeout = chkConfig.getCheckpointTimeout();
this.minPauseBetweenCheckpointsNanos = minPauseBetweenCheckpoints * 1_000_000;
this.maxConcurrentCheckpointAttempts = chkConfig.getMaxConcurrentCheckpoints();
this.tasksToTrigger = checkNotNull(tasksToTrigger);
this.tasksToWaitFor = checkNotNull(tasksToWaitFor);
this.tasksToCommitTo = checkNotNull(tasksToCommitTo);
this.pendingCheckpoints = new LinkedHashMap<>();
this.checkpointIdCounter = checkNotNull(checkpointIDCounter);
this.completedCheckpointStore = checkNotNull(completedCheckpointStore);
this.executor = checkNotNull(executor);
this.sharedStateRegistryFactory = checkNotNull(sharedStateRegistryFactory);
this.sharedStateRegistry = sharedStateRegistryFactory.create(executor);
this.isPreferCheckpointForRecovery = chkConfig.isPreferCheckpointForRecovery();
this.failureManager = checkNotNull(failureManager);
this.recentPendingCheckpoints = new ArrayDeque<>(NUM_GHOST_CHECKPOINT_IDS);
this.masterHooks = new HashMap<>();
this.timer = new ScheduledThreadPoolExecutor(1,
new DispatcherThreadFactory(Thread.currentThread().getThreadGroup(), "Checkpoint Timer"));
this.timer.setRemoveOnCancelPolicy(true);
this.timer.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
this.timer.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
this.checkpointProperties = CheckpointProperties.forCheckpoint(chkConfig.getCheckpointRetentionPolicy());
try {
this.checkpointStorage = checkpointStateBackend.createCheckpointStorage(job);
} catch (IOException e) {
throw new FlinkRuntimeException("Failed to create checkpoint storage at checkpoint coordinator side.", e);
}
try {
checkpointIDCounter.start();
} catch (Throwable t) {
throw new RuntimeException("Failed to start checkpoint ID counter: " + t.getMessage(), t);
}
}
/**
* Adds the given master hook to the checkpoint coordinator. This method does nothing, if
* the checkpoint coordinator already contained a hook with the same ID (as defined via
* {@link MasterTriggerRestoreHook
*
* @param hook The hook to add.
* @return True, if the hook was added, false if the checkpoint coordinator already
* contained a hook with the same ID.
*/
public boolean addMasterHook(MasterTriggerRestoreHook<?> hook) {
checkNotNull(hook);
final String id = hook.getIdentifier();
checkArgument(!StringUtils.isNullOrWhitespaceOnly(id), "The hook has a null or empty id");
synchronized (lock) {
if (!masterHooks.containsKey(id)) {
masterHooks.put(id, hook);
return true;
}
else {
return false;
}
}
}
/**
* Gets the number of currently registered master hooks.
*/
public int getNumberOfRegisteredMasterHooks() {
synchronized (lock) {
return masterHooks.size();
}
}
/**
* Sets the checkpoint stats tracker.
*
* @param statsTracker The checkpoint stats tracker.
*/
public void setCheckpointStatsTracker(@Nullable CheckpointStatsTracker statsTracker) {
this.statsTracker = statsTracker;
}
/**
* Shuts down the checkpoint coordinator.
*
* <p>After this method has been called, the coordinator does not accept
* any further messages and cannot trigger any further checkpoints.
*/
public void shutdown(JobStatus jobStatus) throws Exception {
synchronized (lock) {
if (!shutdown) {
shutdown = true;
LOG.info("Stopping checkpoint coordinator for job {}.", job);
periodicScheduling = false;
triggerRequestQueued = false;
MasterHooks.close(masterHooks.values(), LOG);
masterHooks.clear();
timer.shutdownNow();
for (PendingCheckpoint pending : pendingCheckpoints.values()) {
failPendingCheckpoint(pending, CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN);
}
pendingCheckpoints.clear();
completedCheckpointStore.shutdown(jobStatus);
checkpointIdCounter.shutdown(jobStatus);
}
}
}
public boolean isShutdown() {
return shutdown;
}
/**
* Triggers a savepoint with the given savepoint directory as a target.
*
* @param timestamp The timestamp for the savepoint.
* @param targetLocation Target location for the savepoint, optional. If null, the
* state backend's configured default will be used.
* @return A future to the completed checkpoint
* @throws IllegalStateException If no savepoint directory has been
* specified and no default savepoint directory has been
* configured
*/
public CompletableFuture<CompletedCheckpoint> triggerSavepoint(
final long timestamp,
@Nullable final String targetLocation) {
final CheckpointProperties properties = CheckpointProperties.forSavepoint();
return triggerSavepointInternal(timestamp, properties, false, targetLocation);
}
/**
* Triggers a synchronous savepoint with the given savepoint directory as a target.
*
* @param timestamp The timestamp for the savepoint.
* @param advanceToEndOfEventTime Flag indicating if the source should inject a {@code MAX_WATERMARK} in the pipeline
* to fire any registered event-time timers.
* @param targetLocation Target location for the savepoint, optional. If null, the
* state backend's configured default will be used.
* @return A future to the completed checkpoint
* @throws IllegalStateException If no savepoint directory has been
* specified and no default savepoint directory has been
* configured
*/
public CompletableFuture<CompletedCheckpoint> triggerSynchronousSavepoint(
final long timestamp,
final boolean advanceToEndOfEventTime,
@Nullable final String targetLocation) {
final CheckpointProperties properties = CheckpointProperties.forSyncSavepoint();
return triggerSavepointInternal(timestamp, properties, advanceToEndOfEventTime, targetLocation);
}
private CompletableFuture<CompletedCheckpoint> triggerSavepointInternal(
final long timestamp,
final CheckpointProperties checkpointProperties,
final boolean advanceToEndOfEventTime,
@Nullable final String targetLocation) {
checkNotNull(checkpointProperties);
try {
PendingCheckpoint pendingCheckpoint = triggerCheckpoint(
timestamp,
checkpointProperties,
targetLocation,
false,
advanceToEndOfEventTime);
return pendingCheckpoint.getCompletionFuture();
} catch (CheckpointException e) {
Throwable cause = new CheckpointException("Failed to trigger savepoint.", e.getCheckpointFailureReason());
return FutureUtils.completedExceptionally(cause);
}
}
/**
* Triggers a new standard checkpoint and uses the given timestamp as the checkpoint
* timestamp.
*
* @param timestamp The timestamp for the checkpoint.
* @param isPeriodic Flag indicating whether this triggered checkpoint is
* periodic. If this flag is true, but the periodic scheduler is disabled,
* the checkpoint will be declined.
* @return <code>true</code> if triggering the checkpoint succeeded.
*/
@VisibleForTesting
public PendingCheckpoint triggerCheckpoint(
long timestamp,
CheckpointProperties props,
@Nullable String externalSavepointLocation,
boolean isPeriodic,
boolean advanceToEndOfTime) throws CheckpointException {
if (advanceToEndOfTime && !(props.isSynchronous() && props.isSavepoint())) {
throw new IllegalArgumentException("Only synchronous savepoints are allowed to advance the watermark to MAX.");
}
synchronized (lock) {
if (shutdown) {
throw new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN);
}
if (isPeriodic && !periodicScheduling) {
throw new CheckpointException(CheckpointFailureReason.PERIODIC_SCHEDULER_SHUTDOWN);
}
if (!props.forceCheckpoint()) {
if (triggerRequestQueued) {
LOG.warn("Trying to trigger another checkpoint for job {} while one was queued already.", job);
throw new CheckpointException(CheckpointFailureReason.ALREADY_QUEUED);
}
if (pendingCheckpoints.size() >= maxConcurrentCheckpointAttempts) {
triggerRequestQueued = true;
if (currentPeriodicTrigger != null) {
currentPeriodicTrigger.cancel(false);
currentPeriodicTrigger = null;
}
throw new CheckpointException(CheckpointFailureReason.TOO_MANY_CONCURRENT_CHECKPOINTS);
}
final long earliestNext = lastCheckpointCompletionNanos + minPauseBetweenCheckpointsNanos;
final long durationTillNextMillis = (earliestNext - System.nanoTime()) / 1_000_000;
if (durationTillNextMillis > 0) {
if (currentPeriodicTrigger != null) {
currentPeriodicTrigger.cancel(false);
currentPeriodicTrigger = null;
}
currentPeriodicTrigger = timer.scheduleAtFixedRate(
new ScheduledTrigger(),
durationTillNextMillis, baseInterval, TimeUnit.MILLISECONDS);
throw new CheckpointException(CheckpointFailureReason.MINIMUM_TIME_BETWEEN_CHECKPOINTS);
}
}
}
Execution[] executions = new Execution[tasksToTrigger.length];
for (int i = 0; i < tasksToTrigger.length; i++) {
Execution ee = tasksToTrigger[i].getCurrentExecutionAttempt();
if (ee == null) {
LOG.info("Checkpoint triggering task {} of job {} is not being executed at the moment. Aborting checkpoint.",
tasksToTrigger[i].getTaskNameWithSubtaskIndex(),
job);
throw new CheckpointException(CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING);
} else if (ee.getState() == ExecutionState.RUNNING) {
executions[i] = ee;
} else {
LOG.info("Checkpoint triggering task {} of job {} is not in state {} but {} instead. Aborting checkpoint.",
tasksToTrigger[i].getTaskNameWithSubtaskIndex(),
job,
ExecutionState.RUNNING,
ee.getState());
throw new CheckpointException(CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING);
}
}
Map<ExecutionAttemptID, ExecutionVertex> ackTasks = new HashMap<>(tasksToWaitFor.length);
for (ExecutionVertex ev : tasksToWaitFor) {
Execution ee = ev.getCurrentExecutionAttempt();
if (ee != null) {
ackTasks.put(ee.getAttemptId(), ev);
} else {
LOG.info("Checkpoint acknowledging task {} of job {} is not being executed at the moment. Aborting checkpoint.",
ev.getTaskNameWithSubtaskIndex(),
job);
throw new CheckpointException(CheckpointFailureReason.NOT_ALL_REQUIRED_TASKS_RUNNING);
}
}
synchronized (triggerLock) {
final CheckpointStorageLocation checkpointStorageLocation;
final long checkpointID;
try {
checkpointID = checkpointIdCounter.getAndIncrement();
checkpointStorageLocation = props.isSavepoint() ?
checkpointStorage.initializeLocationForSavepoint(checkpointID, externalSavepointLocation) :
checkpointStorage.initializeLocationForCheckpoint(checkpointID);
}
catch (Throwable t) {
int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet();
LOG.warn("Failed to trigger checkpoint for job {} ({} consecutive failed attempts so far).",
job,
numUnsuccessful,
t);
throw new CheckpointException(CheckpointFailureReason.EXCEPTION, t);
}
final PendingCheckpoint checkpoint = new PendingCheckpoint(
job,
checkpointID,
timestamp,
ackTasks,
props,
checkpointStorageLocation,
executor);
if (statsTracker != null) {
PendingCheckpointStats callback = statsTracker.reportPendingCheckpoint(
checkpointID,
timestamp,
props);
checkpoint.setStatsCallback(callback);
}
final Runnable canceller = () -> {
synchronized (lock) {
if (!checkpoint.isDiscarded()) {
LOG.info("Checkpoint {} of job {} expired before completing.", checkpointID, job);
failPendingCheckpoint(checkpoint, CheckpointFailureReason.CHECKPOINT_EXPIRED);
pendingCheckpoints.remove(checkpointID);
rememberRecentCheckpointId(checkpointID);
triggerQueuedRequests();
}
}
};
try {
synchronized (lock) {
if (shutdown) {
throw new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SHUTDOWN);
}
else if (!props.forceCheckpoint()) {
if (triggerRequestQueued) {
LOG.warn("Trying to trigger another checkpoint for job {} while one was queued already.", job);
throw new CheckpointException(CheckpointFailureReason.ALREADY_QUEUED);
}
if (pendingCheckpoints.size() >= maxConcurrentCheckpointAttempts) {
triggerRequestQueued = true;
if (currentPeriodicTrigger != null) {
currentPeriodicTrigger.cancel(false);
currentPeriodicTrigger = null;
}
throw new CheckpointException(CheckpointFailureReason.TOO_MANY_CONCURRENT_CHECKPOINTS);
}
final long earliestNext = lastCheckpointCompletionNanos + minPauseBetweenCheckpointsNanos;
final long durationTillNextMillis = (earliestNext - System.nanoTime()) / 1_000_000;
if (durationTillNextMillis > 0) {
if (currentPeriodicTrigger != null) {
currentPeriodicTrigger.cancel(false);
currentPeriodicTrigger = null;
}
currentPeriodicTrigger = timer.scheduleAtFixedRate(
new ScheduledTrigger(),
durationTillNextMillis, baseInterval, TimeUnit.MILLISECONDS);
throw new CheckpointException(CheckpointFailureReason.MINIMUM_TIME_BETWEEN_CHECKPOINTS);
}
}
LOG.info("Triggering checkpoint {} @ {} for job {}.", checkpointID, timestamp, job);
pendingCheckpoints.put(checkpointID, checkpoint);
ScheduledFuture<?> cancellerHandle = timer.schedule(
canceller,
checkpointTimeout, TimeUnit.MILLISECONDS);
if (!checkpoint.setCancellerHandle(cancellerHandle)) {
cancellerHandle.cancel(false);
}
final List<MasterState> masterStates = MasterHooks.triggerMasterHooks(masterHooks.values(),
checkpointID, timestamp, executor, Time.milliseconds(checkpointTimeout));
for (MasterState s : masterStates) {
checkpoint.addMasterState(s);
}
}
final CheckpointOptions checkpointOptions = new CheckpointOptions(
props.getCheckpointType(),
checkpointStorageLocation.getLocationReference());
for (Execution execution: executions) {
if (props.isSynchronous()) {
execution.triggerSynchronousSavepoint(checkpointID, timestamp, checkpointOptions, advanceToEndOfTime);
} else {
execution.triggerCheckpoint(checkpointID, timestamp, checkpointOptions);
}
}
numUnsuccessfulCheckpointsTriggers.set(0);
return checkpoint;
}
catch (Throwable t) {
synchronized (lock) {
pendingCheckpoints.remove(checkpointID);
}
int numUnsuccessful = numUnsuccessfulCheckpointsTriggers.incrementAndGet();
LOG.warn("Failed to trigger checkpoint {} for job {}. ({} consecutive failed attempts so far)",
checkpointID, job, numUnsuccessful, t);
if (!checkpoint.isDiscarded()) {
failPendingCheckpoint(checkpoint, CheckpointFailureReason.TRIGGER_CHECKPOINT_FAILURE, t);
}
try {
checkpointStorageLocation.disposeOnFailure();
}
catch (Throwable t2) {
LOG.warn("Cannot dispose failed checkpoint storage location {}", checkpointStorageLocation, t2);
}
throw new CheckpointException(CheckpointFailureReason.EXCEPTION, t);
}
}
}
/**
* Receives a {@link DeclineCheckpoint} message for a pending checkpoint.
*
* @param message Checkpoint decline from the task manager
* @param taskManagerLocationInfo The location info of the decline checkpoint message's sender
*/
public void receiveDeclineMessage(DeclineCheckpoint message, String taskManagerLocationInfo) {
if (shutdown || message == null) {
return;
}
if (!job.equals(message.getJob())) {
throw new IllegalArgumentException("Received DeclineCheckpoint message for job " +
message.getJob() + " from " + taskManagerLocationInfo + " while this coordinator handles job " + job);
}
final long checkpointId = message.getCheckpointId();
final String reason = (message.getReason() != null ? message.getReason().getMessage() : "");
PendingCheckpoint checkpoint;
synchronized (lock) {
if (shutdown) {
return;
}
checkpoint = pendingCheckpoints.remove(checkpointId);
if (checkpoint != null && !checkpoint.isDiscarded()) {
LOG.info("Decline checkpoint {} by task {} of job {} at {}.",
checkpointId,
message.getTaskExecutionId(),
job,
taskManagerLocationInfo);
discardCheckpoint(checkpoint, message.getReason());
}
else if (checkpoint != null) {
throw new IllegalStateException(
"Received message for discarded but non-removed checkpoint " + checkpointId);
}
else if (LOG.isDebugEnabled()) {
if (recentPendingCheckpoints.contains(checkpointId)) {
LOG.debug("Received another decline message for now expired checkpoint attempt {} from task {} of job {} at {} : {}",
checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason);
} else {
LOG.debug("Received decline message for unknown (too old?) checkpoint attempt {} from task {} of job {} at {} : {}",
checkpointId, message.getTaskExecutionId(), job, taskManagerLocationInfo, reason);
}
}
}
}
/**
* Receives an AcknowledgeCheckpoint message and returns whether the
* message was associated with a pending checkpoint.
*
* @param message Checkpoint ack from the task manager
*
* @param taskManagerLocationInfo The location of the acknowledge checkpoint message's sender
* @return Flag indicating whether the ack'd checkpoint was associated
* with a pending checkpoint.
*
* @throws CheckpointException If the checkpoint cannot be added to the completed checkpoint store.
*/
public boolean receiveAcknowledgeMessage(AcknowledgeCheckpoint message, String taskManagerLocationInfo) throws CheckpointException {
if (shutdown || message == null) {
return false;
}
if (!job.equals(message.getJob())) {
LOG.error("Received wrong AcknowledgeCheckpoint message for job {} from {} : {}", job, taskManagerLocationInfo, message);
return false;
}
final long checkpointId = message.getCheckpointId();
synchronized (lock) {
if (shutdown) {
return false;
}
final PendingCheckpoint checkpoint = pendingCheckpoints.get(checkpointId);
if (checkpoint != null && !checkpoint.isDiscarded()) {
switch (checkpoint.acknowledgeTask(message.getTaskExecutionId(), message.getSubtaskState(), message.getCheckpointMetrics())) {
case SUCCESS:
LOG.debug("Received acknowledge message for checkpoint {} from task {} of job {} at {}.",
checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);
if (checkpoint.isFullyAcknowledged()) {
completePendingCheckpoint(checkpoint);
}
break;
case DUPLICATE:
LOG.debug("Received a duplicate acknowledge message for checkpoint {}, task {}, job {}, location {}.",
message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);
break;
case UNKNOWN:
LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {} at {}, " +
"because the task's execution attempt id was unknown. Discarding " +
"the state handle to avoid lingering state.", message.getCheckpointId(),
message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);
discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState());
break;
case DISCARDED:
LOG.warn("Could not acknowledge the checkpoint {} for task {} of job {} at {}, " +
"because the pending checkpoint had been discarded. Discarding the " +
"state handle tp avoid lingering state.",
message.getCheckpointId(), message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);
discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState());
}
return true;
}
else if (checkpoint != null) {
throw new IllegalStateException(
"Received message for discarded but non-removed checkpoint " + checkpointId);
}
else {
boolean wasPendingCheckpoint;
if (recentPendingCheckpoints.contains(checkpointId)) {
wasPendingCheckpoint = true;
LOG.warn("Received late message for now expired checkpoint attempt {} from task " +
"{} of job {} at {}.", checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);
}
else {
LOG.debug("Received message for an unknown checkpoint {} from task {} of job {} at {}.",
checkpointId, message.getTaskExecutionId(), message.getJob(), taskManagerLocationInfo);
wasPendingCheckpoint = false;
}
discardSubtaskState(message.getJob(), message.getTaskExecutionId(), message.getCheckpointId(), message.getSubtaskState());
return wasPendingCheckpoint;
}
}
}
/**
* Try to complete the given pending checkpoint.
*
* <p>Important: This method should only be called in the checkpoint lock scope.
*
* @param pendingCheckpoint to complete
* @throws CheckpointException if the completion failed
*/
private void completePendingCheckpoint(PendingCheckpoint pendingCheckpoint) throws CheckpointException {
final long checkpointId = pendingCheckpoint.getCheckpointId();
final CompletedCheckpoint completedCheckpoint;
Map<OperatorID, OperatorState> operatorStates = pendingCheckpoint.getOperatorStates();
sharedStateRegistry.registerAll(operatorStates.values());
try {
try {
completedCheckpoint = pendingCheckpoint.finalizeCheckpoint();
failureManager.handleCheckpointSuccess(pendingCheckpoint.getCheckpointId());
}
catch (Exception e1) {
if (!pendingCheckpoint.isDiscarded()) {
failPendingCheckpoint(pendingCheckpoint, CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1);
}
throw new CheckpointException("Could not finalize the pending checkpoint " + checkpointId + '.',
CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, e1);
}
Preconditions.checkState(pendingCheckpoint.isDiscarded() && completedCheckpoint != null);
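// Persist the completed checkpoint; if storing fails, discard it asynchronously and fail the completion.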
try {
completedCheckpointStore.addCheckpoint(completedCheckpoint);
} catch (Exception exception) {
executor.execute(new Runnable() {
@Override
public void run() {
try {
completedCheckpoint.discardOnFailedStoring();
} catch (Throwable t) {
LOG.warn("Could not properly discard completed checkpoint {}.", completedCheckpoint.getCheckpointID(), t);
}
}
});
throw new CheckpointException("Could not complete the pending checkpoint " + checkpointId + '.',
CheckpointFailureReason.FINALIZE_CHECKPOINT_FAILURE, exception);
}
} finally {
pendingCheckpoints.remove(checkpointId);
triggerQueuedRequests();
}
rememberRecentCheckpointId(checkpointId);
dropSubsumedCheckpoints(checkpointId);
lastCheckpointCompletionNanos = System.nanoTime();
LOG.info("Completed checkpoint {} for job {} ({} bytes in {} ms).", checkpointId, job,
completedCheckpoint.getStateSize(), completedCheckpoint.getDuration());
if (LOG.isDebugEnabled()) {
StringBuilder builder = new StringBuilder();
builder.append("Checkpoint state: ");
for (OperatorState state : completedCheckpoint.getOperatorStates().values()) {
builder.append(state);
builder.append(", ");
}
builder.setLength(builder.length() - 2);
LOG.debug(builder.toString());
}
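// Notify all committing tasks that the checkpoint is complete.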
final long timestamp = completedCheckpoint.getTimestamp();
for (ExecutionVertex ev : tasksToCommitTo) {
Execution ee = ev.getCurrentExecutionAttempt();
if (ee != null) {
ee.notifyCheckpointComplete(checkpointId, timestamp);
}
}
}
/**
* Fails all pending checkpoints which have not been acknowledged by the given execution
* attempt id.
*
* @param executionAttemptId for which to discard unacknowledged pending checkpoints
* @param cause of the failure
*/
public void failUnacknowledgedPendingCheckpointsFor(ExecutionAttemptID executionAttemptId, Throwable cause) {
synchronized (lock) {
Iterator<PendingCheckpoint> pendingCheckpointIterator = pendingCheckpoints.values().iterator();
while (pendingCheckpointIterator.hasNext()) {
final PendingCheckpoint pendingCheckpoint = pendingCheckpointIterator.next();
if (!pendingCheckpoint.isAcknowledgedBy(executionAttemptId)) {
pendingCheckpointIterator.remove();
discardCheckpoint(pendingCheckpoint, cause);
}
}
}
}
private void rememberRecentCheckpointId(long id) {
if (recentPendingCheckpoints.size() >= NUM_GHOST_CHECKPOINT_IDS) {
recentPendingCheckpoints.removeFirst();
}
recentPendingCheckpoints.addLast(id);
}
private void dropSubsumedCheckpoints(long checkpointId) {
Iterator<Map.Entry<Long, PendingCheckpoint>> entries = pendingCheckpoints.entrySet().iterator();
while (entries.hasNext()) {
PendingCheckpoint p = entries.next().getValue();
if (p.getCheckpointId() < checkpointId && p.canBeSubsumed()) {
rememberRecentCheckpointId(p.getCheckpointId());
failPendingCheckpoint(p, CheckpointFailureReason.CHECKPOINT_SUBSUMED);
entries.remove();
}
}
}
/**
* Triggers the queued request, if there is one.
*
* <p>NOTE: The caller of this method must hold the lock when invoking the method!
*/
private void triggerQueuedRequests() {
if (triggerRequestQueued) {
triggerRequestQueued = false;
if (periodicScheduling) {
if (currentPeriodicTrigger != null) {
currentPeriodicTrigger.cancel(false);
}
currentPeriodicTrigger = timer.scheduleAtFixedRate(
new ScheduledTrigger(),
0L, baseInterval, TimeUnit.MILLISECONDS);
}
else {
timer.execute(new ScheduledTrigger());
}
}
}
@VisibleForTesting
int getNumScheduledTasks() {
return timer.getQueue().size();
}
/**
* Restores the latest checkpointed state.
*
* @param tasks Map of job vertices to restore. State for these vertices is
* restored via {@link Execution
* @param errorIfNoCheckpoint Fail if no completed checkpoint is available to
* restore from.
* @param allowNonRestoredState Allow checkpoint state that cannot be mapped
* to any job vertex in tasks.
* @return <code>true</code> if state was restored, <code>false</code> otherwise.
* @throws IllegalStateException If the CheckpointCoordinator is shut down.
* @throws IllegalStateException If no completed checkpoint is available and
* the <code>errorIfNoCheckpoint</code> flag has been set.
* @throws IllegalStateException If the checkpoint contains state that cannot be
* mapped to any job vertex in <code>tasks</code> and the
* <code>allowNonRestoredState</code> flag has not been set.
* @throws IllegalStateException If the max parallelism changed for an operator
* that restores state from this checkpoint.
* @throws IllegalStateException If the parallelism changed for an operator
* that restores <i>non-partitioned</i> state from this
* checkpoint.
*/
public boolean restoreLatestCheckpointedState(
Map<JobVertexID, ExecutionJobVertex> tasks,
boolean errorIfNoCheckpoint,
boolean allowNonRestoredState) throws Exception {
synchronized (lock) {
if (shutdown) {
throw new IllegalStateException("CheckpointCoordinator is shut down");
}
sharedStateRegistry.close();
sharedStateRegistry = sharedStateRegistryFactory.create(executor);
completedCheckpointStore.recover();
for (CompletedCheckpoint completedCheckpoint : completedCheckpointStore.getAllCheckpoints()) {
completedCheckpoint.registerSharedStatesAfterRestored(sharedStateRegistry);
}
LOG.debug("Status of the shared state registry of job {} after restore: {}.", job, sharedStateRegistry);
CompletedCheckpoint latest = completedCheckpointStore.getLatestCheckpoint(isPreferCheckpointForRecovery);
if (latest == null) {
if (errorIfNoCheckpoint) {
throw new IllegalStateException("No completed checkpoint available");
} else {
LOG.debug("Resetting the master hooks.");
MasterHooks.reset(masterHooks.values(), LOG);
return false;
}
}
LOG.info("Restoring job {} from latest valid checkpoint: {}.", job, latest);
final Map<OperatorID, OperatorState> operatorStates = latest.getOperatorStates();
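// Re-distribute the restored operator states across the job's execution vertices.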
StateAssignmentOperation stateAssignmentOperation =
new StateAssignmentOperation(latest.getCheckpointID(), tasks, operatorStates, allowNonRestoredState);
stateAssignmentOperation.assignStates();
MasterHooks.restoreMasterHooks(
masterHooks,
latest.getMasterHookStates(),
latest.getCheckpointID(),
allowNonRestoredState,
LOG);
if (statsTracker != null) {
long restoreTimestamp = System.currentTimeMillis();
RestoredCheckpointStats restored = new RestoredCheckpointStats(
latest.getCheckpointID(),
latest.getProperties(),
restoreTimestamp,
latest.getExternalPointer());
statsTracker.reportRestoredCheckpoint(restored);
}
return true;
}
}
/**
* Restore the state with given savepoint.
*
* @param savepointPointer The pointer to the savepoint.
* @param allowNonRestored True if allowing checkpoint state that cannot be
* mapped to any job vertex in tasks.
* @param tasks Map of job vertices to restore. State for these
* vertices is restored via
* {@link Execution
* @param userClassLoader The class loader to resolve serialized classes in
* legacy savepoint versions.
*/
public boolean restoreSavepoint(
String savepointPointer,
boolean allowNonRestored,
Map<JobVertexID, ExecutionJobVertex> tasks,
ClassLoader userClassLoader) throws Exception {
Preconditions.checkNotNull(savepointPointer, "The savepoint path cannot be null.");
LOG.info("Starting job {} from savepoint {} ({})",
job, savepointPointer, (allowNonRestored ? "allowing non restored state" : ""));
final CompletedCheckpointStorageLocation checkpointLocation = checkpointStorage.resolveCheckpoint(savepointPointer);
CompletedCheckpoint savepoint = Checkpoints.loadAndValidateCheckpoint(
job, tasks, checkpointLocation, userClassLoader, allowNonRestored);
completedCheckpointStore.addCheckpoint(savepoint);
long nextCheckpointId = savepoint.getCheckpointID() + 1;
checkpointIdCounter.setCount(nextCheckpointId);
LOG.info("Reset the checkpoint ID of job {} to {}.", job, nextCheckpointId);
return restoreLatestCheckpointedState(tasks, true, allowNonRestored);
}
public int getNumberOfPendingCheckpoints() {
return this.pendingCheckpoints.size();
}
public int getNumberOfRetainedSuccessfulCheckpoints() {
synchronized (lock) {
return completedCheckpointStore.getNumberOfRetainedCheckpoints();
}
}
public Map<Long, PendingCheckpoint> getPendingCheckpoints() {
synchronized (lock) {
return new HashMap<>(this.pendingCheckpoints);
}
}
public List<CompletedCheckpoint> getSuccessfulCheckpoints() throws Exception {
synchronized (lock) {
return completedCheckpointStore.getAllCheckpoints();
}
}
public CheckpointStorageCoordinatorView getCheckpointStorage() {
return checkpointStorage;
}
public CompletedCheckpointStore getCheckpointStore() {
return completedCheckpointStore;
}
public CheckpointIDCounter getCheckpointIdCounter() {
return checkpointIdCounter;
}
public long getCheckpointTimeout() {
return checkpointTimeout;
}
/**
* Returns whether periodic checkpointing has been configured.
*
* @return <code>true</code> if periodic checkpoints have been configured.
*/
public boolean isPeriodicCheckpointingConfigured() {
return baseInterval != Long.MAX_VALUE;
}
public void startCheckpointScheduler() {
synchronized (lock) {
if (shutdown) {
throw new IllegalArgumentException("Checkpoint coordinator is shut down");
}
stopCheckpointScheduler();
periodicScheduling = true;
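// Pick a random initial delay between the minimum pause and the base interval for the first periodic trigger.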
long initialDelay = ThreadLocalRandom.current().nextLong(
minPauseBetweenCheckpointsNanos / 1_000_000L, baseInterval + 1L);
currentPeriodicTrigger = timer.scheduleAtFixedRate(
new ScheduledTrigger(), initialDelay, baseInterval, TimeUnit.MILLISECONDS);
}
}
public void stopCheckpointScheduler() {
synchronized (lock) {
triggerRequestQueued = false;
periodicScheduling = false;
if (currentPeriodicTrigger != null) {
currentPeriodicTrigger.cancel(false);
currentPeriodicTrigger = null;
}
abortPendingCheckpoints(new CheckpointException(CheckpointFailureReason.CHECKPOINT_COORDINATOR_SUSPEND));
numUnsuccessfulCheckpointsTriggers.set(0);
}
}
/**
* Aborts all the pending checkpoints due to an exception.
* @param exception The exception.
*/
public void abortPendingCheckpoints(CheckpointException exception) {
synchronized (lock) {
for (PendingCheckpoint p : pendingCheckpoints.values()) {
failPendingCheckpoint(p, exception.getCheckpointFailureReason());
}
pendingCheckpoints.clear();
}
}
public JobStatusListener createActivatorDeactivator() {
synchronized (lock) {
if (shutdown) {
throw new IllegalArgumentException("Checkpoint coordinator is shut down");
}
if (jobStatusListener == null) {
jobStatusListener = new CheckpointCoordinatorDeActivator(this);
}
return jobStatusListener;
}
}
private final class ScheduledTrigger implements Runnable {
@Override
public void run() {
try {
triggerCheckpoint(System.currentTimeMillis(), true);
}
catch (Exception e) {
LOG.error("Exception while triggering checkpoint for job {}.", job, e);
}
}
}
/**
* Discards the given pending checkpoint because of the given cause.
*
* @param pendingCheckpoint to discard
* @param cause for discarding the checkpoint
*/
private void discardCheckpoint(PendingCheckpoint pendingCheckpoint, @Nullable Throwable cause) {
assert(Thread.holdsLock(lock));
Preconditions.checkNotNull(pendingCheckpoint);
final long checkpointId = pendingCheckpoint.getCheckpointId();
LOG.info("Discarding checkpoint {} of job {}.", checkpointId, job, cause);
if (cause == null) {
failPendingCheckpoint(pendingCheckpoint, CheckpointFailureReason.CHECKPOINT_DECLINED);
} else if (cause instanceof CheckpointException) {
CheckpointException exception = (CheckpointException) cause;
failPendingCheckpoint(pendingCheckpoint, exception.getCheckpointFailureReason(), cause);
} else {
failPendingCheckpoint(pendingCheckpoint, CheckpointFailureReason.JOB_FAILURE, cause);
}
rememberRecentCheckpointId(checkpointId);
boolean haveMoreRecentPending = false;
for (PendingCheckpoint p : pendingCheckpoints.values()) {
if (!p.isDiscarded() && p.getCheckpointId() >= pendingCheckpoint.getCheckpointId()) {
haveMoreRecentPending = true;
break;
}
}
if (!haveMoreRecentPending) {
triggerQueuedRequests();
}
}
/**
* Discards the given state object asynchronously belonging to the given job, execution attempt
* id and checkpoint id.
*
* @param jobId identifying the job to which the state object belongs
* @param executionAttemptID identifying the task to which the state object belongs
* @param checkpointId of the state object
* @param subtaskState to discard asynchronously
*/
private void discardSubtaskState(
final JobID jobId,
final ExecutionAttemptID executionAttemptID,
final long checkpointId,
final TaskStateSnapshot subtaskState) {
if (subtaskState != null) {
executor.execute(new Runnable() {
@Override
public void run() {
try {
subtaskState.discardState();
} catch (Throwable t2) {
LOG.warn("Could not properly discard state object of checkpoint {} " +
"belonging to task {} of job {}.", checkpointId, executionAttemptID, jobId, t2);
}
}
});
}
}
private void failPendingCheckpoint(
final PendingCheckpoint pendingCheckpoint,
final CheckpointFailureReason reason,
final Throwable cause) {
CheckpointException exception = new CheckpointException(reason, cause);
pendingCheckpoint.abort(reason, cause);
failureManager.handleCheckpointException(exception, pendingCheckpoint.getCheckpointId());
}
private void failPendingCheckpoint(
final PendingCheckpoint pendingCheckpoint,
final CheckpointFailureReason reason) {
failPendingCheckpoint(pendingCheckpoint, reason, null);
}
} |
The only place you use `sparkExecutorID` is to log the `onExecutorAdded` event. So I imagine there should be one `onExecutorAdded` event per distinct `sparkExecutorID`. | public PortablePipelineResult run(RunnerApi.Pipeline pipeline, JobInfo jobInfo) {
SparkPortablePipelineTranslator translator;
boolean isStreaming = pipelineOptions.isStreaming() || hasUnboundedPCollections(pipeline);
if (isStreaming) {
translator = new SparkStreamingPortablePipelineTranslator();
} else {
translator = new SparkBatchPortablePipelineTranslator();
}
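// Expand sized splittable ParDos, drop known native transforms, and fuse the pipeline if it has not been fused already.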
Pipeline pipelineWithSdfExpanded =
ProtoOverrides.updateTransform(
PTransformTranslation.PAR_DO_TRANSFORM_URN,
pipeline,
SplittableParDoExpander.createSizedReplacement());
Pipeline trimmedPipeline =
TrivialNativeTransformExpander.forKnownUrns(
pipelineWithSdfExpanded, translator.knownUrns());
RunnerApi.Pipeline fusedPipeline =
trimmedPipeline.getComponents().getTransformsMap().values().stream()
.anyMatch(proto -> ExecutableStage.URN.equals(proto.getSpec().getUrn()))
? trimmedPipeline
: GreedyPipelineFuser.fuse(trimmedPipeline).toPipeline();
if (pipelineOptions.getFilesToStage() == null) {
pipelineOptions.setFilesToStage(
detectClassPathResourcesToStage(
SparkPipelineRunner.class.getClassLoader(), pipelineOptions));
LOG.info(
"PipelineOptions.filesToStage was not specified. Defaulting to files from the classpath");
}
prepareFilesToStage(pipelineOptions);
LOG.info(
"Will stage {} files. (Enable logging at DEBUG level to see which files will be staged.)",
pipelineOptions.getFilesToStage().size());
LOG.debug("Staging files: {}", pipelineOptions.getFilesToStage());
PortablePipelineResult result;
final JavaSparkContext jsc = SparkContextFactory.getSparkContext(pipelineOptions);
EventLoggingListener eventLoggingListener;
String jobId = jobInfo.jobId();
String jobName = jobInfo.jobName();
Long startTime = jsc.startTime();
String sparkUser = jsc.sparkUser();
String sparkMaster = "";
String sparkExecutorID = "";
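// Read the Spark master and executor id from the configuration; they are used below when logging the executor-added event.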
Tuple2<String, String>[] sparkConfList = jsc.getConf().getAll();
for (Tuple2<String, String> sparkConf : sparkConfList) {
if (sparkConf._1().equals("spark.master")) {
sparkMaster = sparkConf._2();
} else if (sparkConf._1().equals("spark.executor.id")) {
sparkExecutorID = sparkConf._2();
}
}
try {
URI eventLogDirectory = new URI(pipelineOptions.getSparkHistoryDir());
File eventLogDirectoryFile = new File(eventLogDirectory.getPath());
if (eventLogDirectoryFile.exists() && eventLogDirectoryFile.isDirectory()) {
eventLoggingListener =
new EventLoggingListener(
jobId,
new scala.Option<String>() {
@Override
public boolean isEmpty() {
return false;
}
@Override
public String get() {
return jobName;
}
@Override
public Object productElement(int i) {
return null;
}
@Override
public int productArity() {
return 0;
}
@Override
public boolean canEqual(Object o) {
return false;
}
},
eventLogDirectory,
jsc.getConf(),
jsc.hadoopConfiguration());
} else {
eventLoggingListener = null;
}
} catch (URISyntaxException e) {
e.printStackTrace();
eventLoggingListener = null;
}
if (eventLoggingListener != null) {
eventLoggingListener.initializeLogIfNecessary(false, false);
eventLoggingListener.start();
scala.collection.immutable.Map<String, String> logUrlMap =
new scala.collection.immutable.HashMap<String, String>();
eventLoggingListener.onExecutorAdded(
new SparkListenerExecutorAdded(
Instant.now().getMillis(),
sparkExecutorID,
new ExecutorInfo(sparkMaster, 0, logUrlMap)));
}
LOG.info(String.format("Running job %s on Spark master %s", jobInfo.jobId(), jsc.master()));
AggregatorsAccumulator.init(pipelineOptions, jsc);
MetricsEnvironment.setMetricsSupported(true);
MetricsAccumulator.init(pipelineOptions, jsc);
final SparkTranslationContext context =
translator.createTranslationContext(jsc, pipelineOptions, jobInfo);
final ExecutorService executorService = Executors.newSingleThreadExecutor();
LOG.info(String.format("Running job %s on Spark master %s", jobInfo.jobId(), jsc.master()));
if (isStreaming) {
final JavaStreamingContext jssc =
((SparkStreamingTranslationContext) context).getStreamingContext();
jssc.addStreamingListener(
new JavaStreamingListenerWrapper(
new AggregatorsAccumulator.AccumulatorCheckpointingSparkListener()));
jssc.addStreamingListener(
new JavaStreamingListenerWrapper(
new MetricsAccumulator.AccumulatorCheckpointingSparkListener()));
for (JavaStreamingListener listener :
pipelineOptions.as(SparkContextOptions.class).getListeners()) {
LOG.info("Registered listener {}." + listener.getClass().getSimpleName());
jssc.addStreamingListener(new JavaStreamingListenerWrapper(listener));
}
jssc.addStreamingListener(
new JavaStreamingListenerWrapper(
new GlobalWatermarkHolder.WatermarkAdvancingStreamingListener()));
jssc.checkpoint(pipelineOptions.getCheckpointDir());
Long timeout =
pipelineOptions.as(SparkPortableStreamingPipelineOptions.class).getStreamingTimeoutMs();
final Future<?> submissionFuture =
executorService.submit(
() -> {
translator.translate(fusedPipeline, context);
LOG.info(
String.format(
"Job %s: Pipeline translated successfully. Computing outputs",
jobInfo.jobId()));
context.computeOutputs();
jssc.start();
try {
jssc.awaitTerminationOrTimeout(timeout);
} catch (InterruptedException e) {
LOG.warn("Streaming context interrupted, shutting down.", e);
}
jssc.stop();
LOG.info(String.format("Job %s finished.", jobInfo.jobId()));
});
result = new SparkPipelineResult.PortableStreamingMode(submissionFuture, jssc);
} else {
final Future<?> submissionFuture =
executorService.submit(
() -> {
translator.translate(fusedPipeline, context);
LOG.info(
String.format(
"Job %s: Pipeline translated successfully. Computing outputs",
jobInfo.jobId()));
context.computeOutputs();
LOG.info(String.format("Job %s finished.", jobInfo.jobId()));
});
result = new SparkPipelineResult.PortableBatchMode(submissionFuture, jsc);
}
executorService.shutdown();
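// Wait for the pipeline to finish, then push the accumulated metrics to the configured metrics sink.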
result.waitUntilFinish();
MetricsPusher metricsPusher =
new MetricsPusher(
MetricsAccumulator.getInstance().value(),
pipelineOptions.as(MetricsOptions.class),
result);
metricsPusher.start();
if (eventLoggingListener != null) {
HashMap<String, String> driverLogs = new HashMap<String, String>();
MetricResults metricResults = result.metrics();
for (MetricResult<DistributionResult> distributionResultMetricResult :
metricResults.allMetrics().getDistributions()) {
MetricName metricName = distributionResultMetricResult.getName();
DistributionResult distributionResult = distributionResultMetricResult.getAttempted();
if (distributionResult != null) {
long min = distributionResult.getMin();
long max = distributionResult.getMax();
long count = distributionResult.getCount();
double mean = distributionResult.getMean();
String value =
"min: "
+ Long.toString(min)
+ ", max: "
+ Long.toString(max)
+ ", count: "
+ Long.toString(count)
+ ", mean: "
+ Double.toString(mean);
driverLogs.put(metricName.toString(), value);
}
}
eventLoggingListener.onApplicationStart(
new SparkListenerApplicationStart(
jobId,
new scala.Option<String>() {
@Override
public boolean isEmpty() {
return false;
}
@Override
public String get() {
return jobName;
}
@Override
public Object productElement(int i) {
return null;
}
@Override
public int productArity() {
return 0;
}
@Override
public boolean canEqual(Object o) {
return false;
}
},
startTime,
sparkUser,
new scala.Option<String>() {
@Override
public boolean isEmpty() {
return false;
}
@Override
public String get() {
return jobName;
}
@Override
public Object productElement(int i) {
return null;
}
@Override
public int productArity() {
return 0;
}
@Override
public boolean canEqual(Object o) {
return false;
}
},
new scala.Option<Map<String, String>>() {
@Override
public boolean isEmpty() {
return false;
}
@Override
public Map<String, String> get() {
return JavaConverters.mapAsScalaMapConverter(driverLogs).asScala();
}
@Override
public Object productElement(int i) {
return null;
}
@Override
public int productArity() {
return 0;
}
@Override
public boolean canEqual(Object o) {
return false;
}
}));
eventLoggingListener.onApplicationEnd(
new SparkListenerApplicationEnd(Instant.now().getMillis()));
eventLoggingListener.stop();
}
return result;
} | sparkExecutorID = sparkConf._2(); | public PortablePipelineResult run(RunnerApi.Pipeline pipeline, JobInfo jobInfo)
throws URISyntaxException {
SparkPortablePipelineTranslator translator;
boolean isStreaming = pipelineOptions.isStreaming() || hasUnboundedPCollections(pipeline);
if (isStreaming) {
translator = new SparkStreamingPortablePipelineTranslator();
} else {
translator = new SparkBatchPortablePipelineTranslator();
}
Pipeline pipelineWithSdfExpanded =
ProtoOverrides.updateTransform(
PTransformTranslation.PAR_DO_TRANSFORM_URN,
pipeline,
SplittableParDoExpander.createSizedReplacement());
Pipeline trimmedPipeline =
TrivialNativeTransformExpander.forKnownUrns(
pipelineWithSdfExpanded, translator.knownUrns());
RunnerApi.Pipeline fusedPipeline =
trimmedPipeline.getComponents().getTransformsMap().values().stream()
.anyMatch(proto -> ExecutableStage.URN.equals(proto.getSpec().getUrn()))
? trimmedPipeline
: GreedyPipelineFuser.fuse(trimmedPipeline).toPipeline();
if (pipelineOptions.getFilesToStage() == null) {
pipelineOptions.setFilesToStage(
detectClassPathResourcesToStage(
SparkPipelineRunner.class.getClassLoader(), pipelineOptions));
LOG.info(
"PipelineOptions.filesToStage was not specified. Defaulting to files from the classpath");
}
prepareFilesToStage(pipelineOptions);
LOG.info(
"Will stage {} files. (Enable logging at DEBUG level to see which files will be staged.)",
pipelineOptions.getFilesToStage().size());
LOG.debug("Staging files: {}", pipelineOptions.getFilesToStage());
PortablePipelineResult result;
final JavaSparkContext jsc = SparkContextFactory.getSparkContext(pipelineOptions);
long startTime = Instant.now().getMillis();
EventLoggingListener eventLoggingListener = null;
if (pipelineOptions.getEventLogEnabled()) {
eventLoggingListener =
new EventLoggingListener(
jsc.getConf().getAppId(),
scala.Option.apply("1"),
new URI(pipelineOptions.getSparkHistoryDir()),
jsc.getConf(),
jsc.hadoopConfiguration());
eventLoggingListener.initializeLogIfNecessary(false, false);
eventLoggingListener.start();
scala.collection.immutable.Map<String, String> logUrlMap =
new scala.collection.immutable.HashMap<String, String>();
Tuple2<String, String>[] sparkMasters = jsc.getConf().getAllWithPrefix("spark.master");
Tuple2<String, String>[] sparkExecutors = jsc.getConf().getAllWithPrefix("spark.executor.id");
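// Log one executor-added event per distinct executor id found in the configuration.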
for (Tuple2<String, String> sparkExecutor : sparkExecutors) {
eventLoggingListener.onExecutorAdded(
new SparkListenerExecutorAdded(
startTime,
sparkExecutor._2(),
new ExecutorInfo(sparkMasters[0]._2(), 0, logUrlMap)));
}
}
LOG.info(String.format("Running job %s on Spark master %s", jobInfo.jobId(), jsc.master()));
AggregatorsAccumulator.init(pipelineOptions, jsc);
MetricsEnvironment.setMetricsSupported(true);
MetricsAccumulator.init(pipelineOptions, jsc);
final SparkTranslationContext context =
translator.createTranslationContext(jsc, pipelineOptions, jobInfo);
final ExecutorService executorService = Executors.newSingleThreadExecutor();
LOG.info(String.format("Running job %s on Spark master %s", jobInfo.jobId(), jsc.master()));
if (isStreaming) {
final JavaStreamingContext jssc =
((SparkStreamingTranslationContext) context).getStreamingContext();
jssc.addStreamingListener(
new JavaStreamingListenerWrapper(
new AggregatorsAccumulator.AccumulatorCheckpointingSparkListener()));
jssc.addStreamingListener(
new JavaStreamingListenerWrapper(
new MetricsAccumulator.AccumulatorCheckpointingSparkListener()));
for (JavaStreamingListener listener :
pipelineOptions.as(SparkContextOptions.class).getListeners()) {
LOG.info("Registered listener {}." + listener.getClass().getSimpleName());
jssc.addStreamingListener(new JavaStreamingListenerWrapper(listener));
}
jssc.addStreamingListener(
new JavaStreamingListenerWrapper(
new GlobalWatermarkHolder.WatermarkAdvancingStreamingListener()));
jssc.checkpoint(pipelineOptions.getCheckpointDir());
Long timeout =
pipelineOptions.as(SparkPortableStreamingPipelineOptions.class).getStreamingTimeoutMs();
final Future<?> submissionFuture =
executorService.submit(
() -> {
translator.translate(fusedPipeline, context);
LOG.info(
String.format(
"Job %s: Pipeline translated successfully. Computing outputs",
jobInfo.jobId()));
context.computeOutputs();
jssc.start();
try {
jssc.awaitTerminationOrTimeout(timeout);
} catch (InterruptedException e) {
LOG.warn("Streaming context interrupted, shutting down.", e);
}
jssc.stop();
LOG.info(String.format("Job %s finished.", jobInfo.jobId()));
});
result = new SparkPipelineResult.PortableStreamingMode(submissionFuture, jssc);
} else {
final Future<?> submissionFuture =
executorService.submit(
() -> {
translator.translate(fusedPipeline, context);
LOG.info(
String.format(
"Job %s: Pipeline translated successfully. Computing outputs",
jobInfo.jobId()));
context.computeOutputs();
LOG.info(String.format("Job %s finished.", jobInfo.jobId()));
});
result = new SparkPipelineResult.PortableBatchMode(submissionFuture, jsc);
}
executorService.shutdown();
result.waitUntilFinish();
MetricsPusher metricsPusher =
new MetricsPusher(
MetricsAccumulator.getInstance().value(),
pipelineOptions.as(MetricsOptions.class),
result);
metricsPusher.start();
if (pipelineOptions.getEventLogEnabled()) {
eventLoggingListener.onApplicationStart(
new SparkListenerApplicationStart(
pipelineOptions.as(ApplicationNameOptions.class).getAppName(),
scala.Option.apply(jsc.getConf().getAppId()),
startTime,
jsc.sparkUser(),
scala.Option.apply("1"),
scala.Option.apply(
JavaConverters.mapAsScalaMapConverter(
SparkBeamMetric.renderAllToString(result.metrics()))
.asScala())));
eventLoggingListener.onApplicationEnd(
new SparkListenerApplicationEnd(Instant.now().getMillis()));
eventLoggingListener.stop();
}
return result;
} | class SparkPipelineRunner implements PortablePipelineRunner {
private static final Logger LOG = LoggerFactory.getLogger(SparkPipelineRunner.class);
private final SparkPipelineOptions pipelineOptions;
public SparkPipelineRunner(SparkPipelineOptions pipelineOptions) {
this.pipelineOptions = pipelineOptions;
}
@Override
/**
* Main method to be called only as the entry point to an executable jar with structure as defined
* in {@link PortablePipelineJarUtils}.
*/
public static void main(String[] args) throws Exception {
FileSystems.setDefaultPipelineOptions(PipelineOptionsFactory.create());
SparkPipelineRunnerConfiguration configuration = parseArgs(args);
String baseJobName =
configuration.baseJobName == null
? PortablePipelineJarUtils.getDefaultJobName()
: configuration.baseJobName;
Preconditions.checkArgument(
baseJobName != null,
"No default job name found. Job name must be set using --base-job-name.");
Pipeline pipeline = PortablePipelineJarUtils.getPipelineFromClasspath(baseJobName);
Struct originalOptions = PortablePipelineJarUtils.getPipelineOptionsFromClasspath(baseJobName);
String retrievalToken =
ArtifactApi.CommitManifestResponse.Constants.NO_ARTIFACTS_STAGED_TOKEN
.getValueDescriptor()
.getOptions()
.getExtension(RunnerApi.beamConstant);
SparkPipelineOptions sparkOptions =
PipelineOptionsTranslation.fromProto(originalOptions).as(SparkPipelineOptions.class);
String invocationId =
String.format("%s_%s", sparkOptions.getJobName(), UUID.randomUUID().toString());
if (sparkOptions.getAppName() == null) {
LOG.debug("App name was null. Using invocationId {}", invocationId);
sparkOptions.setAppName(invocationId);
}
SparkPipelineRunner runner = new SparkPipelineRunner(sparkOptions);
JobInfo jobInfo =
JobInfo.create(
invocationId,
sparkOptions.getJobName(),
retrievalToken,
PipelineOptionsTranslation.toProto(sparkOptions));
try {
runner.run(pipeline, jobInfo);
} catch (Exception e) {
throw new RuntimeException(String.format("Job %s failed.", invocationId), e);
}
LOG.info("Job {} finished successfully.", invocationId);
}
private static class SparkPipelineRunnerConfiguration {
@Option(
name = "--base-job-name",
usage =
"The job to run. This must correspond to a subdirectory of the jar's BEAM-PIPELINE "
+ "directory. *Only needs to be specified if the jar contains multiple pipelines.*")
private String baseJobName = null;
@Option(
name = "--spark-history-dir",
usage = "Spark history dir to store logs (e.g. /tmp/spark-events/)")
private String sparkHistoryDir = "/tmp/spark-events/";
String getSparkHistoryDir() {
return this.sparkHistoryDir;
}
}
private static SparkPipelineRunnerConfiguration parseArgs(String[] args) {
SparkPipelineRunnerConfiguration configuration = new SparkPipelineRunnerConfiguration();
CmdLineParser parser = new CmdLineParser(configuration);
try {
parser.parseArgument(args);
} catch (CmdLineException e) {
LOG.error("Unable to parse command line arguments.", e);
parser.printUsage(System.err);
throw new IllegalArgumentException("Unable to parse command line arguments.", e);
}
return configuration;
}
} | class SparkPipelineRunner implements PortablePipelineRunner {
private static final Logger LOG = LoggerFactory.getLogger(SparkPipelineRunner.class);
private final SparkPipelineOptions pipelineOptions;
public SparkPipelineRunner(SparkPipelineOptions pipelineOptions) {
this.pipelineOptions = pipelineOptions;
}
@Override
/**
* Main method to be called only as the entry point to an executable jar with structure as defined
* in {@link PortablePipelineJarUtils}.
*/
public static void main(String[] args) throws Exception {
FileSystems.setDefaultPipelineOptions(PipelineOptionsFactory.create());
SparkPipelineRunnerConfiguration configuration = parseArgs(args);
String baseJobName =
configuration.baseJobName == null
? PortablePipelineJarUtils.getDefaultJobName()
: configuration.baseJobName;
Preconditions.checkArgument(
baseJobName != null,
"No default job name found. Job name must be set using --base-job-name.");
Pipeline pipeline = PortablePipelineJarUtils.getPipelineFromClasspath(baseJobName);
Struct originalOptions = PortablePipelineJarUtils.getPipelineOptionsFromClasspath(baseJobName);
String retrievalToken =
ArtifactApi.CommitManifestResponse.Constants.NO_ARTIFACTS_STAGED_TOKEN
.getValueDescriptor()
.getOptions()
.getExtension(RunnerApi.beamConstant);
SparkPipelineOptions sparkOptions =
PipelineOptionsTranslation.fromProto(originalOptions).as(SparkPipelineOptions.class);
String invocationId =
String.format("%s_%s", sparkOptions.getJobName(), UUID.randomUUID().toString());
if (sparkOptions.getAppName() == null) {
LOG.debug("App name was null. Using invocationId {}", invocationId);
sparkOptions.setAppName(invocationId);
}
SparkPipelineRunner runner = new SparkPipelineRunner(sparkOptions);
JobInfo jobInfo =
JobInfo.create(
invocationId,
sparkOptions.getJobName(),
retrievalToken,
PipelineOptionsTranslation.toProto(sparkOptions));
try {
runner.run(pipeline, jobInfo);
} catch (Exception e) {
throw new RuntimeException(String.format("Job %s failed.", invocationId), e);
}
LOG.info("Job {} finished successfully.", invocationId);
}
private static class SparkPipelineRunnerConfiguration {
@Option(
name = "--base-job-name",
usage =
"The job to run. This must correspond to a subdirectory of the jar's BEAM-PIPELINE "
+ "directory. *Only needs to be specified if the jar contains multiple pipelines.*")
private String baseJobName = null;
}
private static SparkPipelineRunnerConfiguration parseArgs(String[] args) {
SparkPipelineRunnerConfiguration configuration = new SparkPipelineRunnerConfiguration();
CmdLineParser parser = new CmdLineParser(configuration);
try {
parser.parseArgument(args);
} catch (CmdLineException e) {
LOG.error("Unable to parse command line arguments.", e);
parser.printUsage(System.err);
throw new IllegalArgumentException("Unable to parse command line arguments.", e);
}
return configuration;
}
} |
Yes, I think this is fine. Once the migration is complete we can remove these conditions. | static boolean useUnifiedWorker(DataflowPipelineOptions options) {
return hasExperiment(options, "beam_fn_api")
|| hasExperiment(options, "use_runner_v2")
|| hasExperiment(options, "use_unified_worker")
|| hasExperiment(options, "enable_prime");
} | || hasExperiment(options, "enable_prime"); | static boolean useUnifiedWorker(DataflowPipelineOptions options) {
return hasExperiment(options, "beam_fn_api")
|| hasExperiment(options, "use_runner_v2")
|| hasExperiment(options, "use_unified_worker")
|| hasExperiment(options, "enable_prime");
} | class StreamingShardedWriteFactory<UserT, DestinationT, OutputT>
implements PTransformOverrideFactory<
PCollection<UserT>,
WriteFilesResult<DestinationT>,
WriteFiles<UserT, DestinationT, OutputT>> {
static final int DEFAULT_NUM_SHARDS = 10;
DataflowPipelineWorkerPoolOptions options;
StreamingShardedWriteFactory(PipelineOptions options) {
this.options = options.as(DataflowPipelineWorkerPoolOptions.class);
}
@Override
public PTransformReplacement<PCollection<UserT>, WriteFilesResult<DestinationT>>
getReplacementTransform(
AppliedPTransform<
PCollection<UserT>,
WriteFilesResult<DestinationT>,
WriteFiles<UserT, DestinationT, OutputT>>
transform) {
int numShards;
if (options.getMaxNumWorkers() > 0) {
numShards = options.getMaxNumWorkers() * 2;
} else if (options.getNumWorkers() > 0) {
numShards = options.getNumWorkers() * 2;
} else {
numShards = DEFAULT_NUM_SHARDS;
}
try {
List<PCollectionView<?>> sideInputs =
WriteFilesTranslation.getDynamicDestinationSideInputs(transform);
FileBasedSink sink = WriteFilesTranslation.getSink(transform);
WriteFiles<UserT, DestinationT, OutputT> replacement =
WriteFiles.to(sink).withSideInputs(sideInputs);
if (WriteFilesTranslation.isWindowedWrites(transform)) {
replacement = replacement.withWindowedWrites();
}
return PTransformReplacement.of(
PTransformReplacements.getSingletonMainInput(transform),
replacement.withNumShards(numShards));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
public Map<PCollection<?>, ReplacementOutput> mapOutputs(
Map<TupleTag<?>, PCollection<?>> outputs, WriteFilesResult<DestinationT> newOutput) {
return ReplacementOutputs.tagged(outputs, newOutput);
}
} | class StreamingShardedWriteFactory<UserT, DestinationT, OutputT>
implements PTransformOverrideFactory<
PCollection<UserT>,
WriteFilesResult<DestinationT>,
WriteFiles<UserT, DestinationT, OutputT>> {
static final int DEFAULT_NUM_SHARDS = 10;
DataflowPipelineWorkerPoolOptions options;
StreamingShardedWriteFactory(PipelineOptions options) {
this.options = options.as(DataflowPipelineWorkerPoolOptions.class);
}
@Override
public PTransformReplacement<PCollection<UserT>, WriteFilesResult<DestinationT>>
getReplacementTransform(
AppliedPTransform<
PCollection<UserT>,
WriteFilesResult<DestinationT>,
WriteFiles<UserT, DestinationT, OutputT>>
transform) {
int numShards;
if (options.getMaxNumWorkers() > 0) {
numShards = options.getMaxNumWorkers() * 2;
} else if (options.getNumWorkers() > 0) {
numShards = options.getNumWorkers() * 2;
} else {
numShards = DEFAULT_NUM_SHARDS;
}
try {
List<PCollectionView<?>> sideInputs =
WriteFilesTranslation.getDynamicDestinationSideInputs(transform);
FileBasedSink sink = WriteFilesTranslation.getSink(transform);
WriteFiles<UserT, DestinationT, OutputT> replacement =
WriteFiles.to(sink).withSideInputs(sideInputs);
if (WriteFilesTranslation.isWindowedWrites(transform)) {
replacement = replacement.withWindowedWrites();
}
return PTransformReplacement.of(
PTransformReplacements.getSingletonMainInput(transform),
replacement.withNumShards(numShards));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
public Map<PCollection<?>, ReplacementOutput> mapOutputs(
Map<TupleTag<?>, PCollection<?>> outputs, WriteFilesResult<DestinationT> newOutput) {
return ReplacementOutputs.tagged(outputs, newOutput);
}
} |
To add getDetectedLanguageEnglish(), getDetectedLanguageSpanish(), and getUnknownDetectedLanguage() helper methods. These results are reused in many places, so each helper builds one in a single, self-contained operation. | static DetectLanguageResultCollection getExpectedBatchDetectedLanguages() {
final TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(3, 3, 0, 3);
final List<DetectLanguageResult> detectLanguageResultList = Arrays.asList(
new DetectLanguageResult("0", new TextDocumentStatistics(26, 1), null, getDetectedLanguageEnglish()),
new DetectLanguageResult("1", new TextDocumentStatistics(40, 1), null, getDetectedLanguageSpanish()),
new DetectLanguageResult("2", new TextDocumentStatistics(6, 1), null, getUnknownDetectedLanguage()));
return new DetectLanguageResultCollection(detectLanguageResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
} | final TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(3, 3, 0, 3); | static DetectLanguageResultCollection getExpectedBatchDetectedLanguages() {
final TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(3, 3, 0, 3);
final List<DetectLanguageResult> detectLanguageResultList = Arrays.asList(
new DetectLanguageResult("0", new TextDocumentStatistics(26, 1), null, getDetectedLanguageEnglish()),
new DetectLanguageResult("1", new TextDocumentStatistics(40, 1), null, getDetectedLanguageSpanish()),
new DetectLanguageResult("2", new TextDocumentStatistics(6, 1), null, getUnknownDetectedLanguage()));
return new DetectLanguageResultCollection(detectLanguageResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
} | class TestUtils {
private static final String DEFAULT_MODEL_VERSION = "2019-10-01";
static final String INVALID_URL = "htttttttps:
static final String VALID_HTTPS_LOCALHOST = "https:
static final String FAKE_API_KEY = "1234567890";
static final String AZURE_TEXT_ANALYTICS_API_KEY = "AZURE_TEXT_ANALYTICS_API_KEY";
static final List<String> SENTIMENT_INPUTS = Arrays.asList("The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean.");
static final List<String> CATEGORIZED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
static final List<String> LINKED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
static final List<String> KEY_PHRASE_INPUTS = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
static final String TOO_LONG_INPUT = "Thisisaveryveryverylongtextwhichgoesonforalongtimeandwhichalmostdoesn'tseemtostopatanygivenpointintime.ThereasonforthistestistotryandseewhathappenswhenwesubmitaveryveryverylongtexttoLanguage.Thisshouldworkjustfinebutjustincaseitisalwaysgoodtohaveatestcase.ThisallowsustotestwhathappensifitisnotOK.Ofcourseitisgoingtobeokbutthenagainitisalsobettertobesure!";
static final List<String> KEY_PHRASE_FRENCH_INPUTS = Arrays.asList(
"Bonjour tout le monde.",
"Je m'appelle Mondly.");
static final List<String> DETECT_LANGUAGE_INPUTS = Arrays.asList(
"This is written in English", "Este es un documento escrito en Español.", "~@!~:)");
static final List<String> SPANISH_SAME_AS_ENGLISH_INPUTS = Arrays.asList("personal", "social");
static final DetectedLanguage DETECTED_LANGUAGE_SPANISH = new DetectedLanguage("Spanish", "es", 1.0, null);
static final DetectedLanguage DETECTED_LANGUAGE_ENGLISH = new DetectedLanguage("English", "en", 1.0, null);
static final List<DetectedLanguage> DETECT_SPANISH_LANGUAGE_RESULTS = Arrays.asList(
DETECTED_LANGUAGE_SPANISH, DETECTED_LANGUAGE_SPANISH);
static final List<DetectedLanguage> DETECT_ENGLISH_LANGUAGE_RESULTS = Arrays.asList(
DETECTED_LANGUAGE_ENGLISH, DETECTED_LANGUAGE_ENGLISH);
static final HttpResponseException HTTP_RESPONSE_EXCEPTION_CLASS = new HttpResponseException("", null);
static final String DISPLAY_NAME_WITH_ARGUMENTS = "{displayName} with [{arguments}]";
private static final String AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS =
"AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS";
static List<DetectLanguageInput> getDetectLanguageInputs() {
return Arrays.asList(
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"),
new DetectLanguageInput("1", DETECT_LANGUAGE_INPUTS.get(1), "US"),
new DetectLanguageInput("2", DETECT_LANGUAGE_INPUTS.get(2), "US")
);
}
static List<DetectLanguageInput> getDuplicateIdDetectLanguageInputs() {
return Arrays.asList(
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"),
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US")
);
}
static List<TextDocumentInput> getDuplicateTextDocumentInputs() {
return Arrays.asList(
new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0))
);
}
static List<TextDocumentInput> getWarningsTextDocumentInputs() {
return Arrays.asList(
new TextDocumentInput("0", TOO_LONG_INPUT),
new TextDocumentInput("1", CATEGORIZED_ENTITY_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentInputs(List<String> inputs) {
return IntStream.range(0, inputs.size())
.mapToObj(index ->
new TextDocumentInput(String.valueOf(index), inputs.get(index)))
.collect(Collectors.toList());
}
/**
* Helper method to get the expected Batch Detected Languages
*
* @return A {@link DetectLanguageResultCollection}.
*/
static DetectedLanguage getDetectedLanguageEnglish() {
return new DetectedLanguage("English", "en", 0.0, null);
}
static DetectedLanguage getDetectedLanguageSpanish() {
return new DetectedLanguage("Spanish", "es", 0.0, null);
}
static DetectedLanguage getUnknownDetectedLanguage() {
return new DetectedLanguage("(Unknown)", "(Unknown)", 0.0, null);
}
/**
* Helper method to get the expected Batch Categorized Entities
*
* @return A {@link RecognizeEntitiesResultCollection}.
*/
static RecognizeEntitiesResultCollection getExpectedBatchCategorizedEntities() {
return new RecognizeEntitiesResultCollection(
Arrays.asList(getExpectedBatchCategorizedEntities1(), getExpectedBatchCategorizedEntities2()),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* Helper method to get the expected Categorized Entities List 1
*/
static List<CategorizedEntity> getCategorizedEntitiesList1() {
CategorizedEntity categorizedEntity1 = new CategorizedEntity("trip", EntityCategory.EVENT, null, 18, 4, 0.0);
CategorizedEntity categorizedEntity2 = new CategorizedEntity("Seattle", EntityCategory.LOCATION, "GPE", 26, 7, 0.0);
CategorizedEntity categorizedEntity3 = new CategorizedEntity("last week", EntityCategory.DATE_TIME, "DateRange", 34, 9, 0.0);
return Arrays.asList(categorizedEntity1, categorizedEntity2, categorizedEntity3);
}
/**
* Helper method to get the expected Categorized Entities List 2
*/
static List<CategorizedEntity> getCategorizedEntitiesList2() {
return Arrays.asList(new CategorizedEntity("Microsoft", EntityCategory.ORGANIZATION, null, 10, 9, 0.0));
}
/**
* Helper method to get the expected Batch Categorized Entities
*/
static RecognizeEntitiesResult getExpectedBatchCategorizedEntities1() {
IterableStream<CategorizedEntity> categorizedEntityList1 = new IterableStream<>(getCategorizedEntitiesList1());
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
RecognizeEntitiesResult recognizeEntitiesResult1 = new RecognizeEntitiesResult("0", textDocumentStatistics1, null, new CategorizedEntityCollection(categorizedEntityList1, null));
return recognizeEntitiesResult1;
}
/**
* Helper method to get the expected Batch Categorized Entities
*/
static RecognizeEntitiesResult getExpectedBatchCategorizedEntities2() {
IterableStream<CategorizedEntity> categorizedEntityList2 = new IterableStream<>(getCategorizedEntitiesList2());
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeEntitiesResult recognizeEntitiesResult2 = new RecognizeEntitiesResult("1", textDocumentStatistics2, null, new CategorizedEntityCollection(categorizedEntityList2, null));
return recognizeEntitiesResult2;
}
/**
* Helper method to get the expected Batch Linked Entities
* @return A {@link RecognizeLinkedEntitiesResultCollection}.
*/
static RecognizeLinkedEntitiesResultCollection getExpectedBatchLinkedEntities() {
final TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
final List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResultList =
Arrays.asList(
new RecognizeLinkedEntitiesResult(
"0", new TextDocumentStatistics(44, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList1()), null)),
new RecognizeLinkedEntitiesResult(
"1", new TextDocumentStatistics(20, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList2()), null)));
return new RecognizeLinkedEntitiesResultCollection(recognizeLinkedEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected linked Entities List 1
*/
static List<LinkedEntity> getLinkedEntitiesList1() {
final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Seattle", 26, 7, 0.0);
LinkedEntity linkedEntity = new LinkedEntity(
"Seattle", new IterableStream<>(Collections.singletonList(linkedEntityMatch)),
"en", "Seattle", "https:
"Wikipedia");
return Arrays.asList(linkedEntity);
}
/**
* Helper method to get the expected linked Entities List 2
*/
static List<LinkedEntity> getLinkedEntitiesList2() {
LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Microsoft", 10, 9, 0.0);
LinkedEntity linkedEntity = new LinkedEntity(
"Microsoft", new IterableStream<>(Collections.singletonList(linkedEntityMatch)),
"en", "Microsoft", "https:
"Wikipedia");
return Arrays.asList(linkedEntity);
}
/**
* Helper method to get the expected Batch Key Phrases
* @return
*/
static ExtractKeyPhrasesResultCollection getExpectedBatchKeyPhrases() {
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(49, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(21, 1);
ExtractKeyPhraseResult extractKeyPhraseResult1 = new ExtractKeyPhraseResult("0", textDocumentStatistics1, null, new KeyPhrasesCollection(new IterableStream<>(Arrays.asList("input text", "world")), null));
ExtractKeyPhraseResult extractKeyPhraseResult2 = new ExtractKeyPhraseResult("1", textDocumentStatistics2, null, new KeyPhrasesCollection(new IterableStream<>(Collections.singletonList("monde")), null));
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<ExtractKeyPhraseResult> extractKeyPhraseResultList = Arrays.asList(extractKeyPhraseResult1, extractKeyPhraseResult2);
return new ExtractKeyPhrasesResultCollection(extractKeyPhraseResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Text Sentiments
* @return
*/
static AnalyzeSentimentResultCollection getExpectedBatchTextSentiment() {
final TextDocumentStatistics textDocumentStatistics = new TextDocumentStatistics(67, 1);
final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(TextSentiment.MIXED,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(Arrays.asList(
new SentenceSentiment("", TextSentiment.NEGATIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0)),
new SentenceSentiment("", TextSentiment.POSITIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0))
)), null);
final DocumentSentiment expectedDocumentSentiment2 = new DocumentSentiment(TextSentiment.MIXED,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(Arrays.asList(
new SentenceSentiment("", TextSentiment.POSITIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0)),
new SentenceSentiment("", TextSentiment.NEGATIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0))
)), null);
final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0",
textDocumentStatistics, null, expectedDocumentSentiment);
final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1",
textDocumentStatistics, null, expectedDocumentSentiment2);
return new AnalyzeSentimentResultCollection(
Arrays.asList(analyzeSentimentResult1, analyzeSentimentResult2),
DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* Returns a stream of arguments that includes all combinations of eligible {@link HttpClient HttpClients} and
* service versions that should be tested.
*
* @return A stream of HttpClient and service version combinations to test.
*/
static Stream<Arguments> getTestParameters() {
List<Arguments> argumentsList = new ArrayList<>();
getHttpClients()
.forEach(httpClient -> {
Arrays.stream(TextAnalyticsServiceVersion.values()).filter(
TestUtils::shouldServiceVersionBeTested)
.forEach(serviceVersion -> argumentsList.add(Arguments.of(httpClient, serviceVersion)));
});
return argumentsList.stream();
}
/**
* Returns whether the given service version match the rules of test framework.
*
* <ul>
* <li>Using latest service version as default if no environment variable is set.</li>
* <li>If it's set to ALL, all Service versions in {@link TextAnalyticsServiceVersion} will be tested.</li>
* <li>Otherwise, Service version string should match env variable.</li>
* </ul>
*
* Environment values currently supported are: "ALL", "${version}".
* Use commas to separate the service versions to test.
* e.g. {@code set AZURE_TEST_SERVICE_VERSIONS = V1_0, V2_0}
*
* @param serviceVersion ServiceVersion needs to check
* @return Boolean indicates whether filters out the service version or not.
*/
private static boolean shouldServiceVersionBeTested(TextAnalyticsServiceVersion serviceVersion) {
String serviceVersionFromEnv =
Configuration.getGlobalConfiguration().get(AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS);
if (CoreUtils.isNullOrEmpty(serviceVersionFromEnv)) {
return TextAnalyticsServiceVersion.getLatest().equals(serviceVersion);
}
if (AZURE_TEST_SERVICE_VERSIONS_VALUE_ALL.equalsIgnoreCase(serviceVersionFromEnv)) {
return true;
}
String[] configuredServiceVersionList = serviceVersionFromEnv.split(",");
return Arrays.stream(configuredServiceVersionList).anyMatch(configuredServiceVersion ->
serviceVersion.getVersion().equals(configuredServiceVersion.trim()));
}
private TestUtils() {
}
} | class TestUtils {
private static final String DEFAULT_MODEL_VERSION = "2019-10-01";
static final String INVALID_URL = "htttttttps:
static final String VALID_HTTPS_LOCALHOST = "https:
static final String FAKE_API_KEY = "1234567890";
static final String AZURE_TEXT_ANALYTICS_API_KEY = "AZURE_TEXT_ANALYTICS_API_KEY";
static final List<String> SENTIMENT_INPUTS = Arrays.asList("The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean.");
static final List<String> CATEGORIZED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
static final List<String> LINKED_ENTITY_INPUTS = Arrays.asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
static final List<String> KEY_PHRASE_INPUTS = Arrays.asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
static final String TOO_LONG_INPUT = "Thisisaveryveryverylongtextwhichgoesonforalongtimeandwhichalmostdoesn'tseemtostopatanygivenpointintime.ThereasonforthistestistotryandseewhathappenswhenwesubmitaveryveryverylongtexttoLanguage.Thisshouldworkjustfinebutjustincaseitisalwaysgoodtohaveatestcase.ThisallowsustotestwhathappensifitisnotOK.Ofcourseitisgoingtobeokbutthenagainitisalsobettertobesure!";
static final List<String> KEY_PHRASE_FRENCH_INPUTS = Arrays.asList(
"Bonjour tout le monde.",
"Je m'appelle Mondly.");
static final List<String> DETECT_LANGUAGE_INPUTS = Arrays.asList(
"This is written in English", "Este es un documento escrito en Español.", "~@!~:)");
static final List<String> SPANISH_SAME_AS_ENGLISH_INPUTS = Arrays.asList("personal", "social");
static final DetectedLanguage DETECTED_LANGUAGE_SPANISH = new DetectedLanguage("Spanish", "es", 1.0, null);
static final DetectedLanguage DETECTED_LANGUAGE_ENGLISH = new DetectedLanguage("English", "en", 1.0, null);
static final List<DetectedLanguage> DETECT_SPANISH_LANGUAGE_RESULTS = Arrays.asList(
DETECTED_LANGUAGE_SPANISH, DETECTED_LANGUAGE_SPANISH);
static final List<DetectedLanguage> DETECT_ENGLISH_LANGUAGE_RESULTS = Arrays.asList(
DETECTED_LANGUAGE_ENGLISH, DETECTED_LANGUAGE_ENGLISH);
static final HttpResponseException HTTP_RESPONSE_EXCEPTION_CLASS = new HttpResponseException("", null);
static final String DISPLAY_NAME_WITH_ARGUMENTS = "{displayName} with [{arguments}]";
private static final String AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS =
"AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS";
static List<DetectLanguageInput> getDetectLanguageInputs() {
return Arrays.asList(
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"),
new DetectLanguageInput("1", DETECT_LANGUAGE_INPUTS.get(1), "US"),
new DetectLanguageInput("2", DETECT_LANGUAGE_INPUTS.get(2), "US")
);
}
static List<DetectLanguageInput> getDuplicateIdDetectLanguageInputs() {
return Arrays.asList(
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"),
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US")
);
}
static List<TextDocumentInput> getDuplicateTextDocumentInputs() {
return Arrays.asList(
new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0))
);
}
static List<TextDocumentInput> getWarningsTextDocumentInputs() {
return Arrays.asList(
new TextDocumentInput("0", TOO_LONG_INPUT),
new TextDocumentInput("1", CATEGORIZED_ENTITY_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentInputs(List<String> inputs) {
return IntStream.range(0, inputs.size())
.mapToObj(index ->
new TextDocumentInput(String.valueOf(index), inputs.get(index)))
.collect(Collectors.toList());
}
/**
* Helper method to get the expected detected language (English).
*
* @return A {@link DetectedLanguage}.
*/
static DetectedLanguage getDetectedLanguageEnglish() {
return new DetectedLanguage("English", "en", 0.0, null);
}
static DetectedLanguage getDetectedLanguageSpanish() {
return new DetectedLanguage("Spanish", "es", 0.0, null);
}
static DetectedLanguage getUnknownDetectedLanguage() {
return new DetectedLanguage("(Unknown)", "(Unknown)", 0.0, null);
}
/**
* Helper method to get the expected Batch Categorized Entities
*
* @return A {@link RecognizeEntitiesResultCollection}.
*/
static RecognizeEntitiesResultCollection getExpectedBatchCategorizedEntities() {
return new RecognizeEntitiesResultCollection(
Arrays.asList(getExpectedBatchCategorizedEntities1(), getExpectedBatchCategorizedEntities2()),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* Helper method to get the expected Categorized Entities List 1
*/
static List<CategorizedEntity> getCategorizedEntitiesList1() {
CategorizedEntity categorizedEntity1 = new CategorizedEntity("trip", EntityCategory.EVENT, null, 0.0, 18, 4);
CategorizedEntity categorizedEntity2 = new CategorizedEntity("Seattle", EntityCategory.LOCATION, "GPE", 0.0, 26, 7);
CategorizedEntity categorizedEntity3 = new CategorizedEntity("last week", EntityCategory.DATE_TIME, "DateRange", 0.0, 34, 9);
return Arrays.asList(categorizedEntity1, categorizedEntity2, categorizedEntity3);
}
/**
* Helper method to get the expected Categorized Entities List 2
*/
static List<CategorizedEntity> getCategorizedEntitiesList2() {
return Arrays.asList(new CategorizedEntity("Microsoft", EntityCategory.ORGANIZATION, null, 0.0, 10, 9));
}
/**
* Helper method to get the expected Batch Categorized Entities
*/
static RecognizeEntitiesResult getExpectedBatchCategorizedEntities1() {
IterableStream<CategorizedEntity> categorizedEntityList1 = new IterableStream<>(getCategorizedEntitiesList1());
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
RecognizeEntitiesResult recognizeEntitiesResult1 = new RecognizeEntitiesResult("0", textDocumentStatistics1, null, new CategorizedEntityCollection(categorizedEntityList1, null));
return recognizeEntitiesResult1;
}
/**
* Helper method to get the expected Batch Categorized Entities
*/
static RecognizeEntitiesResult getExpectedBatchCategorizedEntities2() {
IterableStream<CategorizedEntity> categorizedEntityList2 = new IterableStream<>(getCategorizedEntitiesList2());
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeEntitiesResult recognizeEntitiesResult2 = new RecognizeEntitiesResult("1", textDocumentStatistics2, null, new CategorizedEntityCollection(categorizedEntityList2, null));
return recognizeEntitiesResult2;
}
/**
* Helper method to get the expected Batch Linked Entities
* @return A {@link RecognizeLinkedEntitiesResultCollection}.
*/
static RecognizeLinkedEntitiesResultCollection getExpectedBatchLinkedEntities() {
final TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
final List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResultList =
Arrays.asList(
new RecognizeLinkedEntitiesResult(
"0", new TextDocumentStatistics(44, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList1()), null)),
new RecognizeLinkedEntitiesResult(
"1", new TextDocumentStatistics(20, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList2()), null)));
return new RecognizeLinkedEntitiesResultCollection(recognizeLinkedEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected linked Entities List 1
*/
static List<LinkedEntity> getLinkedEntitiesList1() {
final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Seattle", 26, 7, 0.0);
LinkedEntity linkedEntity = new LinkedEntity(
"Seattle", new IterableStream<>(Collections.singletonList(linkedEntityMatch)),
"en", "Seattle", "https:
"Wikipedia");
return Arrays.asList(linkedEntity);
}
/**
* Helper method to get the expected linked Entities List 2
*/
static List<LinkedEntity> getLinkedEntitiesList2() {
LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Microsoft", 10, 9, 0.0);
LinkedEntity linkedEntity = new LinkedEntity(
"Microsoft", new IterableStream<>(Collections.singletonList(linkedEntityMatch)),
"en", "Microsoft", "https:
"Wikipedia");
return Arrays.asList(linkedEntity);
}
/**
* Helper method to get the expected batch key phrases.
*
* @return An {@link ExtractKeyPhrasesResultCollection} with the expected key phrase results.
*/
static ExtractKeyPhrasesResultCollection getExpectedBatchKeyPhrases() {
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(49, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(21, 1);
ExtractKeyPhraseResult extractKeyPhraseResult1 = new ExtractKeyPhraseResult("0", textDocumentStatistics1, null, new KeyPhrasesCollection(new IterableStream<>(Arrays.asList("input text", "world")), null));
ExtractKeyPhraseResult extractKeyPhraseResult2 = new ExtractKeyPhraseResult("1", textDocumentStatistics2, null, new KeyPhrasesCollection(new IterableStream<>(Collections.singletonList("monde")), null));
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<ExtractKeyPhraseResult> extractKeyPhraseResultList = Arrays.asList(extractKeyPhraseResult1, extractKeyPhraseResult2);
return new ExtractKeyPhrasesResultCollection(extractKeyPhraseResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected batch text sentiments.
*
* @return An {@link AnalyzeSentimentResultCollection} with the expected sentiment results.
*/
static AnalyzeSentimentResultCollection getExpectedBatchTextSentiment() {
final TextDocumentStatistics textDocumentStatistics = new TextDocumentStatistics(67, 1);
final DocumentSentiment expectedDocumentSentiment = new DocumentSentiment(TextSentiment.MIXED,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(Arrays.asList(
new SentenceSentiment("", TextSentiment.NEGATIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0)),
new SentenceSentiment("", TextSentiment.POSITIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0))
)), null);
final DocumentSentiment expectedDocumentSentiment2 = new DocumentSentiment(TextSentiment.MIXED,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(Arrays.asList(
new SentenceSentiment("", TextSentiment.POSITIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0)),
new SentenceSentiment("", TextSentiment.NEGATIVE, new SentimentConfidenceScores(0.0, 0.0, 0.0))
)), null);
final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0",
textDocumentStatistics, null, expectedDocumentSentiment);
final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1",
textDocumentStatistics, null, expectedDocumentSentiment2);
return new AnalyzeSentimentResultCollection(
Arrays.asList(analyzeSentimentResult1, analyzeSentimentResult2),
DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* Returns a stream of arguments that includes all combinations of eligible {@link HttpClient HttpClients} and
* service versions that should be tested.
*
* @return A stream of HttpClient and service version combinations to test.
*/
static Stream<Arguments> getTestParameters() {
List<Arguments> argumentsList = new ArrayList<>();
getHttpClients()
.forEach(httpClient -> {
Arrays.stream(TextAnalyticsServiceVersion.values()).filter(
TestUtils::shouldServiceVersionBeTested)
.forEach(serviceVersion -> argumentsList.add(Arguments.of(httpClient, serviceVersion)));
});
return argumentsList.stream();
}
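// Illustrative usage sketch (not from the original file): tests typically consume these
// combinations via JUnit 5's @MethodSource; when the provider lives in another class, it is
// referenced by its fully qualified name. The test method below is hypothetical.
//
// @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
// @MethodSource("getTestParameters")
// public void detectLanguageTest(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
//     // build a client for the given httpClient/serviceVersion and run assertions
// }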
/**
* Returns whether the given service version matches the rules of the test framework.
*
* <ul>
* <li>Using latest service version as default if no environment variable is set.</li>
* <li>If it's set to ALL, all Service versions in {@link TextAnalyticsServiceVersion} will be tested.</li>
* <li>Otherwise, Service version string should match env variable.</li>
* </ul>
*
* Environment values currently supported are: "ALL", "${version}".
* Use commas to separate the service versions to test.
* e.g. {@code set AZURE_TEST_SERVICE_VERSIONS = V1_0, V2_0}
*
* @param serviceVersion The service version to check.
* @return {@code true} if the service version should be tested; {@code false} otherwise.
*/
private static boolean shouldServiceVersionBeTested(TextAnalyticsServiceVersion serviceVersion) {
String serviceVersionFromEnv =
Configuration.getGlobalConfiguration().get(AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS);
if (CoreUtils.isNullOrEmpty(serviceVersionFromEnv)) {
return TextAnalyticsServiceVersion.getLatest().equals(serviceVersion);
}
if (AZURE_TEST_SERVICE_VERSIONS_VALUE_ALL.equalsIgnoreCase(serviceVersionFromEnv)) {
return true;
}
String[] configuredServiceVersionList = serviceVersionFromEnv.split(",");
return Arrays.stream(configuredServiceVersionList).anyMatch(configuredServiceVersion ->
serviceVersion.getVersion().equals(configuredServiceVersion.trim()));
}
private TestUtils() {
}
} |
There should be an `else` to set `transactionQuotaSize` to Config.defaut_xxx; | public int hashCode() {
return Objects.hash(id, fullQualifiedName, dataQuotaBytes);
} | return Objects.hash(id, fullQualifiedName, dataQuotaBytes); | public int hashCode() {
return Objects.hash(id, fullQualifiedName, dataQuotaBytes);
} | class Database extends MetaObject implements Writable, DatabaseIf<Table> {
private static final Logger LOG = LogManager.getLogger(Database.class);
private long id;
private volatile String fullQualifiedName;
private String clusterName;
private ReentrantReadWriteLock rwLock;
private Map<Long, Table> idToTable;
private Map<String, Table> nameToTable;
private Map<String, String> lowerCaseToTableName;
private ConcurrentMap<String, ImmutableList<Function>> name2Function = Maps.newConcurrentMap();
private DatabaseEncryptKey dbEncryptKey;
private volatile long dataQuotaBytes;
private volatile long replicaQuotaSize;
private volatile long transactionQuotaSize;
private volatile boolean isDropped;
public enum DbState {
NORMAL, LINK, MOVE
}
private String attachDbName;
private DbState dbState;
private DatabaseProperty dbProperties = new DatabaseProperty();
public Database() {
this(0, null);
}
public Database(long id, String name) {
this.id = id;
this.fullQualifiedName = name;
if (this.fullQualifiedName == null) {
this.fullQualifiedName = "";
}
this.rwLock = new ReentrantReadWriteLock(true);
this.idToTable = Maps.newConcurrentMap();
this.nameToTable = Maps.newConcurrentMap();
this.lowerCaseToTableName = Maps.newConcurrentMap();
this.dataQuotaBytes = Config.default_db_data_quota_bytes;
this.replicaQuotaSize = Config.default_db_replica_quota_size;
this.transactionQuotaSize = Config.default_db_max_running_txn_num;
this.dbState = DbState.NORMAL;
this.attachDbName = "";
this.clusterName = "";
this.dbEncryptKey = new DatabaseEncryptKey();
}
public void markDropped() {
isDropped = true;
}
public void unmarkDropped() {
isDropped = false;
}
public void readLock() {
this.rwLock.readLock().lock();
}
public void readUnlock() {
this.rwLock.readLock().unlock();
}
public void writeLock() {
this.rwLock.writeLock().lock();
}
public void writeUnlock() {
this.rwLock.writeLock().unlock();
}
public boolean tryWriteLock(long timeout, TimeUnit unit) {
try {
return this.rwLock.writeLock().tryLock(timeout, unit);
} catch (InterruptedException e) {
LOG.warn("failed to try write lock at db[" + id + "]", e);
return false;
}
}
public boolean isWriteLockHeldByCurrentThread() {
return this.rwLock.writeLock().isHeldByCurrentThread();
}
public boolean writeLockIfExist() {
if (!isDropped) {
this.rwLock.writeLock().lock();
return true;
}
return false;
}
public <E extends Exception> void writeLockOrException(E e) throws E {
writeLock();
if (isDropped) {
writeUnlock();
throw e;
}
}
public void writeLockOrDdlException() throws DdlException {
writeLockOrException(new DdlException("unknown db, dbName=" + fullQualifiedName));
}
public long getId() {
return id;
}
public String getFullName() {
return fullQualifiedName;
}
public void setNameWithLock(String newName) {
writeLock();
try {
this.fullQualifiedName = newName;
for (Table table : idToTable.values()) {
table.setQualifiedDbName(fullQualifiedName);
}
} finally {
writeUnlock();
}
}
public void setDataQuota(long newQuota) {
Preconditions.checkArgument(newQuota >= 0L);
LOG.info("database[{}] set quota from {} to {}", fullQualifiedName, dataQuotaBytes, newQuota);
this.dataQuotaBytes = newQuota;
}
public void setReplicaQuota(long newQuota) {
Preconditions.checkArgument(newQuota >= 0L);
LOG.info("database[{}] set replica quota from {} to {}", fullQualifiedName, replicaQuotaSize, newQuota);
this.replicaQuotaSize = newQuota;
}
public void setTransactionQuotaSize(long newQuota) {
Preconditions.checkArgument(newQuota >= 0L);
LOG.info("database[{}] set transaction quota from {} to {}", fullQualifiedName, transactionQuotaSize, newQuota);
this.transactionQuotaSize = newQuota;
}
public long getDataQuota() {
return dataQuotaBytes;
}
public long getReplicaQuota() {
return replicaQuotaSize;
}
public long getTransactionQuotaSize() {
return transactionQuotaSize;
}
public DatabaseProperty getDbProperties() {
return dbProperties;
}
public void setDbProperties(DatabaseProperty dbProperties) {
this.dbProperties = dbProperties;
}
public long getUsedDataQuotaWithLock() {
long usedDataQuota = 0;
readLock();
try {
for (Table table : this.idToTable.values()) {
if (table.getType() != TableType.OLAP) {
continue;
}
OlapTable olapTable = (OlapTable) table;
olapTable.readLock();
try {
usedDataQuota = usedDataQuota + olapTable.getDataSize();
} finally {
olapTable.readUnlock();
}
}
return usedDataQuota;
} finally {
readUnlock();
}
}
public long getReplicaCountWithLock() {
readLock();
try {
long usedReplicaCount = 0;
for (Table table : this.idToTable.values()) {
if (table.getType() != TableType.OLAP) {
continue;
}
OlapTable olapTable = (OlapTable) table;
olapTable.readLock();
try {
usedReplicaCount = usedReplicaCount + olapTable.getReplicaCount();
} finally {
olapTable.readUnlock();
}
}
return usedReplicaCount;
} finally {
readUnlock();
}
}
public long getReplicaQuotaLeftWithLock() {
long leftReplicaQuota = replicaQuotaSize - getReplicaCountWithLock();
return Math.max(leftReplicaQuota, 0L);
}
public void checkDataSizeQuota() throws DdlException {
Pair<Double, String> quotaUnitPair = DebugUtil.getByteUint(dataQuotaBytes);
String readableQuota = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(quotaUnitPair.first) + " "
+ quotaUnitPair.second;
long usedDataQuota = getUsedDataQuotaWithLock();
long leftDataQuota = Math.max(dataQuotaBytes - usedDataQuota, 0);
Pair<Double, String> leftQuotaUnitPair = DebugUtil.getByteUint(leftDataQuota);
String readableLeftQuota = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(leftQuotaUnitPair.first) + " "
+ leftQuotaUnitPair.second;
LOG.info("database[{}] data quota: left bytes: {} / total: {}",
fullQualifiedName, readableLeftQuota, readableQuota);
if (leftDataQuota <= 0L) {
throw new DdlException("Database[" + fullQualifiedName
+ "] data size exceeds quota[" + readableQuota + "]");
}
}
public void checkReplicaQuota() throws DdlException {
long leftReplicaQuota = getReplicaQuotaLeftWithLock();
LOG.info("database[{}] replica quota: left number: {} / total: {}",
fullQualifiedName, leftReplicaQuota, replicaQuotaSize);
if (leftReplicaQuota <= 0L) {
throw new DdlException("Database[" + fullQualifiedName
+ "] replica number exceeds quota[" + replicaQuotaSize + "]");
}
}
public void checkQuota() throws DdlException {
checkDataSizeQuota();
checkReplicaQuota();
}
public boolean isTableExist(String tableName) {
if (Env.isTableNamesCaseInsensitive()) {
tableName = lowerCaseToTableName.get(tableName.toLowerCase());
if (tableName == null) {
return false;
}
}
return nameToTable.containsKey(tableName);
}
public Pair<Boolean, Boolean> createTableWithLock(
Table table, boolean isReplay, boolean setIfNotExist) throws DdlException {
boolean result = true;
boolean isTableExist = false;
table.setQualifiedDbName(fullQualifiedName);
writeLockOrDdlException();
try {
String tableName = table.getName();
if (Env.isStoredTableNamesLowerCase()) {
tableName = tableName.toLowerCase();
}
if (isTableExist(tableName)) {
result = setIfNotExist;
isTableExist = true;
} else {
idToTable.put(table.getId(), table);
nameToTable.put(table.getName(), table);
lowerCaseToTableName.put(tableName.toLowerCase(), tableName);
if (!isReplay) {
CreateTableInfo info = new CreateTableInfo(fullQualifiedName, table);
Env.getCurrentEnv().getEditLog().logCreateTable(info);
}
if (table.getType() == TableType.ELASTICSEARCH) {
Env.getCurrentEnv().getEsRepository().registerTable((EsTable) table);
}
}
return Pair.of(result, isTableExist);
} finally {
writeUnlock();
}
}
public boolean createTable(Table table) {
boolean result = true;
table.setQualifiedDbName(fullQualifiedName);
String tableName = table.getName();
if (Env.isStoredTableNamesLowerCase()) {
tableName = tableName.toLowerCase();
}
if (isTableExist(tableName)) {
result = false;
} else {
idToTable.put(table.getId(), table);
nameToTable.put(table.getName(), table);
lowerCaseToTableName.put(tableName.toLowerCase(), tableName);
}
table.unmarkDropped();
return result;
}
public void dropTable(String tableName) {
if (Env.isStoredTableNamesLowerCase()) {
tableName = tableName.toLowerCase();
}
Table table = getTableNullable(tableName);
if (table != null) {
this.nameToTable.remove(tableName);
this.idToTable.remove(table.getId());
this.lowerCaseToTableName.remove(tableName.toLowerCase());
table.markDropped();
}
}
public List<Table> getTables() {
return new ArrayList<>(idToTable.values());
}
public List<Table> getTablesOnIdOrder() {
return idToTable.values().stream()
.sorted(Comparator.comparing(Table::getId))
.collect(Collectors.toList());
}
public List<Table> getViews() {
List<Table> views = new ArrayList<>();
for (Table table : idToTable.values()) {
if (table.getType() == TableType.VIEW) {
views.add(table);
}
}
return views;
}
/**
* This method is used to get the list of existing tables by table id list; tables that do not exist are ignored.
*/
public List<Table> getTablesOnIdOrderIfExist(List<Long> tableIdList) {
List<Table> tableList = Lists.newArrayListWithCapacity(tableIdList.size());
for (Long tableId : tableIdList) {
Table table = idToTable.get(tableId);
if (table != null) {
tableList.add(table);
}
}
if (tableList.size() > 1) {
return tableList.stream().sorted(Comparator.comparing(Table::getId)).collect(Collectors.toList());
}
return tableList;
}
public List<Table> getTablesOnIdOrderOrThrowException(List<Long> tableIdList) throws MetaNotFoundException {
List<Table> tableList = Lists.newArrayListWithCapacity(tableIdList.size());
for (Long tableId : tableIdList) {
Table table = idToTable.get(tableId);
if (table == null) {
throw new MetaNotFoundException("unknown table, tableId=" + tableId);
}
tableList.add(table);
}
if (tableList.size() > 1) {
return tableList.stream().sorted(Comparator.comparing(Table::getId)).collect(Collectors.toList());
}
return tableList;
}
public Set<String> getTableNamesWithLock() {
readLock();
try {
return new HashSet<>(this.nameToTable.keySet());
} finally {
readUnlock();
}
}
/**
* This is a thread-safe method when nameToTable is a concurrent hash map
*/
@Override
public Table getTableNullable(String tableName) {
if (Env.isStoredTableNamesLowerCase()) {
tableName = tableName.toLowerCase();
}
if (Env.isTableNamesCaseInsensitive()) {
tableName = lowerCaseToTableName.get(tableName.toLowerCase());
if (tableName == null) {
return null;
}
}
return nameToTable.get(tableName);
}
/**
* This is a thread-safe method when idToTable is a concurrent hash map
*/
@Override
public Table getTableNullable(long tableId) {
return idToTable.get(tableId);
}
public int getMaxReplicationNum() {
int ret = 0;
readLock();
try {
for (Table table : idToTable.values()) {
if (table.getType() != TableType.OLAP) {
continue;
}
OlapTable olapTable = (OlapTable) table;
table.readLock();
try {
for (Partition partition : olapTable.getAllPartitions()) {
short replicationNum = olapTable.getPartitionInfo()
.getReplicaAllocation(partition.getId()).getTotalReplicaNum();
if (ret < replicationNum) {
ret = replicationNum;
}
}
} finally {
table.readUnlock();
}
}
} finally {
readUnlock();
}
return ret;
}
public static Database read(DataInput in) throws IOException {
Database db = new Database();
db.readFields(in);
return db;
}
@Override
public String getSignature(int signatureVersion) {
StringBuilder sb = new StringBuilder(signatureVersion);
sb.append(fullQualifiedName);
String md5 = DigestUtils.md5Hex(sb.toString());
LOG.debug("get signature of database {}: {}. signature string: {}", fullQualifiedName, md5, sb.toString());
return md5;
}
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
out.writeLong(id);
Text.writeString(out, fullQualifiedName);
int numTables = nameToTable.size();
out.writeInt(numTables);
for (Map.Entry<String, Table> entry : nameToTable.entrySet()) {
entry.getValue().write(out);
}
out.writeLong(dataQuotaBytes);
Text.writeString(out, clusterName);
Text.writeString(out, dbState.name());
Text.writeString(out, attachDbName);
out.writeInt(name2Function.size());
for (Entry<String, ImmutableList<Function>> entry : name2Function.entrySet()) {
Text.writeString(out, entry.getKey());
out.writeInt(entry.getValue().size());
for (Function function : entry.getValue()) {
function.write(out);
}
}
dbEncryptKey.write(out);
out.writeLong(replicaQuotaSize);
dbProperties.write(out);
out.writeLong(transactionQuotaSize);
}
@Override
public void readFields(DataInput in) throws IOException {
super.readFields(in);
id = in.readLong();
fullQualifiedName = Text.readString(in);
int numTables = in.readInt();
for (int i = 0; i < numTables; ++i) {
Table table = Table.read(in);
table.setQualifiedDbName(fullQualifiedName);
String tableName = table.getName();
nameToTable.put(tableName, table);
idToTable.put(table.getId(), table);
lowerCaseToTableName.put(tableName.toLowerCase(), tableName);
}
dataQuotaBytes = in.readLong();
clusterName = Text.readString(in);
dbState = DbState.valueOf(Text.readString(in));
attachDbName = Text.readString(in);
int numEntries = in.readInt();
for (int i = 0; i < numEntries; ++i) {
String name = Text.readString(in);
ImmutableList.Builder<Function> builder = ImmutableList.builder();
int numFunctions = in.readInt();
for (int j = 0; j < numFunctions; ++j) {
builder.add(Function.read(in));
}
name2Function.put(name, builder.build());
}
if (Env.getCurrentEnvJournalVersion() >= FeMetaVersion.VERSION_102) {
dbEncryptKey = DatabaseEncryptKey.read(in);
}
replicaQuotaSize = in.readLong();
if (Env.getCurrentEnvJournalVersion() >= FeMetaVersion.VERSION_105) {
dbProperties = DatabaseProperty.read(in);
}
if (Env.getCurrentEnvJournalVersion() >= FeMetaVersion.VERSION_116) {
transactionQuotaSize = in.readLong();
}
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof Database)) {
return false;
}
Database other = (Database) obj;
return id == other.id
&& idToTable.equals(other.idToTable)
&& fullQualifiedName.equals(other.fullQualifiedName)
&& dataQuotaBytes == other.dataQuotaBytes;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public DbState getDbState() {
return dbState;
}
public void setDbState(DbState dbState) {
if (dbState == null) {
return;
}
this.dbState = dbState;
}
public void setAttachDb(String name) {
this.attachDbName = name;
}
public String getAttachDb() {
return this.attachDbName;
}
public void setName(String name) {
this.fullQualifiedName = name;
for (Table table : nameToTable.values()) {
table.setQualifiedDbName(name);
}
}
public synchronized void addFunction(Function function, boolean ifNotExists) throws UserException {
function.checkWritable();
if (addFunctionImpl(function, ifNotExists, false)) {
Env.getCurrentEnv().getEditLog().logAddFunction(function);
}
}
public synchronized void replayAddFunction(Function function) {
try {
addFunctionImpl(function, false, true);
} catch (UserException e) {
throw new RuntimeException(e);
}
}
/**
* @param function the function to add
* @param ifNotExists if true, return quietly when an identical function already exists
* @param isReplay whether this call replays an edit log entry
* @return true if the function was added, false otherwise
* @throws UserException if an identical function already exists and ifNotExists is false
*/
private boolean addFunctionImpl(Function function, boolean ifNotExists, boolean isReplay) throws UserException {
String functionName = function.getFunctionName().getFunction();
List<Function> existFuncs = name2Function.get(functionName);
if (!isReplay) {
if (existFuncs != null) {
for (Function existFunc : existFuncs) {
if (function.compare(existFunc, Function.CompareMode.IS_IDENTICAL)) {
if (ifNotExists) {
LOG.debug("function already exists");
return false;
}
throw new UserException("function already exists");
}
}
}
long functionId = Env.getCurrentEnv().getNextId();
function.setId(functionId);
}
ImmutableList.Builder<Function> builder = ImmutableList.builder();
if (existFuncs != null) {
builder.addAll(existFuncs);
}
builder.add(function);
name2Function.put(functionName, builder.build());
return true;
}
public synchronized void dropFunction(FunctionSearchDesc function, boolean ifExists) throws UserException {
if (dropFunctionImpl(function, ifExists)) {
Env.getCurrentEnv().getEditLog().logDropFunction(function);
}
}
public synchronized void replayDropFunction(FunctionSearchDesc functionSearchDesc) {
try {
dropFunctionImpl(functionSearchDesc, false);
} catch (UserException e) {
throw new RuntimeException(e);
}
}
/**
* @param function the search descriptor of the function to drop
* @param ifExists if true, return quietly when the function does not exist
* @return true if the function was dropped, false otherwise
* @throws UserException if the function does not exist and ifExists is false
*/
private boolean dropFunctionImpl(FunctionSearchDesc function, boolean ifExists) throws UserException {
String functionName = function.getName().getFunction();
List<Function> existFuncs = name2Function.get(functionName);
if (existFuncs == null) {
if (ifExists) {
LOG.debug("function name does not exist: " + functionName);
return false;
}
throw new UserException("function name does not exist: " + functionName);
}
boolean isFound = false;
ImmutableList.Builder<Function> builder = ImmutableList.builder();
for (Function existFunc : existFuncs) {
if (function.isIdentical(existFunc)) {
isFound = true;
} else {
builder.add(existFunc);
}
}
if (!isFound) {
if (ifExists) {
LOG.debug("function does not exist: " + function);
return false;
}
throw new UserException("function does not exist: " + function);
}
ImmutableList<Function> newFunctions = builder.build();
if (newFunctions.isEmpty()) {
name2Function.remove(functionName);
} else {
name2Function.put(functionName, newFunctions);
}
return true;
}
public synchronized Function getFunction(Function desc, Function.CompareMode mode) {
List<Function> fns = name2Function.get(desc.getFunctionName().getFunction());
if (fns == null) {
return null;
}
return Function.getFunction(fns, desc, mode);
}
public synchronized Function getFunction(FunctionSearchDesc function) throws AnalysisException {
String functionName = function.getName().getFunction();
List<Function> existFuncs = name2Function.get(functionName);
if (existFuncs == null) {
throw new AnalysisException("Unknown function, function=" + function.toString());
}
for (Function existFunc : existFuncs) {
if (function.isIdentical(existFunc)) {
return existFunc;
}
}
throw new AnalysisException("Unknown function, function=" + function.toString());
}
public synchronized List<Function> getFunctions() {
List<Function> functions = Lists.newArrayList();
for (Map.Entry<String, ImmutableList<Function>> entry : name2Function.entrySet()) {
functions.addAll(entry.getValue());
}
return functions;
}
public boolean isInfoSchemaDb() {
return ClusterNamespace.getNameFromFullName(fullQualifiedName).equalsIgnoreCase(InfoSchemaDb.DATABASE_NAME);
}
public synchronized void addEncryptKey(EncryptKey encryptKey, boolean ifNotExists) throws UserException {
if (addEncryptKeyImpl(encryptKey, false, ifNotExists)) {
Env.getCurrentEnv().getEditLog().logAddEncryptKey(encryptKey);
}
}
public synchronized void replayAddEncryptKey(EncryptKey encryptKey) {
try {
addEncryptKeyImpl(encryptKey, true, true);
} catch (UserException e) {
Preconditions.checkArgument(false);
}
}
private boolean addEncryptKeyImpl(EncryptKey encryptKey, boolean isReplay, boolean ifNotExists)
throws UserException {
String keyName = encryptKey.getEncryptKeyName().getKeyName();
EncryptKey existKey = dbEncryptKey.getName2EncryptKey().get(keyName);
if (!isReplay) {
if (existKey != null) {
if (existKey.isIdentical(encryptKey)) {
if (ifNotExists) {
return false;
}
throw new UserException("encryptKey ["
+ existKey.getEncryptKeyName().toString() + "] already exists");
}
}
}
dbEncryptKey.getName2EncryptKey().put(keyName, encryptKey);
return true;
}
public synchronized void dropEncryptKey(EncryptKeySearchDesc encryptKeySearchDesc, boolean ifExists)
throws UserException {
if (dropEncryptKeyImpl(encryptKeySearchDesc, ifExists)) {
Env.getCurrentEnv().getEditLog().logDropEncryptKey(encryptKeySearchDesc);
}
}
public synchronized void replayDropEncryptKey(EncryptKeySearchDesc encryptKeySearchDesc) {
try {
dropEncryptKeyImpl(encryptKeySearchDesc, true);
} catch (UserException e) {
Preconditions.checkArgument(false);
}
}
private boolean dropEncryptKeyImpl(EncryptKeySearchDesc encryptKeySearchDesc, boolean ifExists)
throws UserException {
String keyName = encryptKeySearchDesc.getKeyEncryptKeyName().getKeyName();
EncryptKey existKey = dbEncryptKey.getName2EncryptKey().get(keyName);
if (existKey == null) {
if (ifExists) {
return false;
}
throw new UserException("Unknown encryptKey, encryptKey=" + encryptKeySearchDesc.toString());
}
boolean isFound = false;
if (encryptKeySearchDesc.isIdentical(existKey)) {
isFound = true;
}
if (!isFound) {
if (ifExists) {
return false;
}
throw new UserException("Unknown encryptKey, encryptKey=" + encryptKeySearchDesc.toString());
}
dbEncryptKey.getName2EncryptKey().remove(keyName);
return true;
}
public synchronized List<EncryptKey> getEncryptKeys() {
List<EncryptKey> encryptKeys = Lists.newArrayList();
for (Map.Entry<String, EncryptKey> entry : dbEncryptKey.getName2EncryptKey().entrySet()) {
encryptKeys.add(entry.getValue());
}
return encryptKeys;
}
public synchronized EncryptKey getEncryptKey(String keyName) {
if (dbEncryptKey.getName2EncryptKey().containsKey(keyName)) {
return dbEncryptKey.getName2EncryptKey().get(keyName);
}
return null;
}
} | class Database extends MetaObject implements Writable, DatabaseIf<Table> {
private static final Logger LOG = LogManager.getLogger(Database.class);
private static final String TRANSACTION_QUOTA_SIZE = "transactionQuotaSize";
private long id;
private volatile String fullQualifiedName;
private String clusterName;
private ReentrantReadWriteLock rwLock;
private Map<Long, Table> idToTable;
private Map<String, Table> nameToTable;
private Map<String, String> lowerCaseToTableName;
private ConcurrentMap<String, ImmutableList<Function>> name2Function = Maps.newConcurrentMap();
private DatabaseEncryptKey dbEncryptKey;
private volatile long dataQuotaBytes;
private volatile long replicaQuotaSize;
private volatile long transactionQuotaSize;
private volatile boolean isDropped;
public enum DbState {
NORMAL, LINK, MOVE
}
private String attachDbName;
private DbState dbState;
private DatabaseProperty dbProperties = new DatabaseProperty();
public Database() {
this(0, null);
}
public Database(long id, String name) {
this.id = id;
this.fullQualifiedName = name;
if (this.fullQualifiedName == null) {
this.fullQualifiedName = "";
}
this.rwLock = new ReentrantReadWriteLock(true);
this.idToTable = Maps.newConcurrentMap();
this.nameToTable = Maps.newConcurrentMap();
this.lowerCaseToTableName = Maps.newConcurrentMap();
this.dataQuotaBytes = Config.default_db_data_quota_bytes;
this.replicaQuotaSize = Config.default_db_replica_quota_size;
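// A default_db_max_running_txn_num of -1 falls back to the global max_running_txn_num_per_db limit.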
this.transactionQuotaSize = Config.default_db_max_running_txn_num == -1L
? Config.max_running_txn_num_per_db
: Config.default_db_max_running_txn_num;
this.dbState = DbState.NORMAL;
this.attachDbName = "";
this.clusterName = "";
this.dbEncryptKey = new DatabaseEncryptKey();
}
public void markDropped() {
isDropped = true;
}
public void unmarkDropped() {
isDropped = false;
}
public void readLock() {
this.rwLock.readLock().lock();
}
public void readUnlock() {
this.rwLock.readLock().unlock();
}
public void writeLock() {
this.rwLock.writeLock().lock();
}
public void writeUnlock() {
this.rwLock.writeLock().unlock();
}
public boolean tryWriteLock(long timeout, TimeUnit unit) {
try {
return this.rwLock.writeLock().tryLock(timeout, unit);
} catch (InterruptedException e) {
LOG.warn("failed to try write lock at db[" + id + "]", e);
return false;
}
}
public boolean isWriteLockHeldByCurrentThread() {
return this.rwLock.writeLock().isHeldByCurrentThread();
}
public boolean writeLockIfExist() {
if (!isDropped) {
this.rwLock.writeLock().lock();
return true;
}
return false;
}
public <E extends Exception> void writeLockOrException(E e) throws E {
writeLock();
if (isDropped) {
writeUnlock();
throw e;
}
}
public void writeLockOrDdlException() throws DdlException {
writeLockOrException(new DdlException("unknown db, dbName=" + fullQualifiedName));
}
public long getId() {
return id;
}
public String getFullName() {
return fullQualifiedName;
}
public void setNameWithLock(String newName) {
writeLock();
try {
this.fullQualifiedName = newName;
for (Table table : idToTable.values()) {
table.setQualifiedDbName(fullQualifiedName);
}
} finally {
writeUnlock();
}
}
public void setDataQuota(long newQuota) {
Preconditions.checkArgument(newQuota >= 0L);
LOG.info("database[{}] set quota from {} to {}", fullQualifiedName, dataQuotaBytes, newQuota);
this.dataQuotaBytes = newQuota;
}
public void setReplicaQuota(long newQuota) {
Preconditions.checkArgument(newQuota >= 0L);
LOG.info("database[{}] set replica quota from {} to {}", fullQualifiedName, replicaQuotaSize, newQuota);
this.replicaQuotaSize = newQuota;
}
public void setTransactionQuotaSize(long newQuota) {
writeLock();
try {
Preconditions.checkArgument(newQuota >= 0L);
LOG.info("database[{}] try to set transaction quota from {} to {}",
fullQualifiedName, transactionQuotaSize, newQuota);
this.transactionQuotaSize = newQuota;
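// Persist the new quota in dbProperties (under TRANSACTION_QUOTA_SIZE) so readFields() can restore it when the metadata image is reloaded.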
this.dbProperties.put(TRANSACTION_QUOTA_SIZE, String.valueOf(transactionQuotaSize));
} finally {
writeUnlock();
}
}
public long getDataQuota() {
return dataQuotaBytes;
}
public long getReplicaQuota() {
return replicaQuotaSize;
}
public long getTransactionQuotaSize() {
return transactionQuotaSize;
}
public DatabaseProperty getDbProperties() {
return dbProperties;
}
public void setDbProperties(DatabaseProperty dbProperties) {
this.dbProperties = dbProperties;
}
public long getUsedDataQuotaWithLock() {
long usedDataQuota = 0;
readLock();
try {
for (Table table : this.idToTable.values()) {
if (table.getType() != TableType.OLAP) {
continue;
}
OlapTable olapTable = (OlapTable) table;
olapTable.readLock();
try {
usedDataQuota = usedDataQuota + olapTable.getDataSize();
} finally {
olapTable.readUnlock();
}
}
return usedDataQuota;
} finally {
readUnlock();
}
}
public long getReplicaCountWithLock() {
readLock();
try {
long usedReplicaCount = 0;
for (Table table : this.idToTable.values()) {
if (table.getType() != TableType.OLAP) {
continue;
}
OlapTable olapTable = (OlapTable) table;
olapTable.readLock();
try {
usedReplicaCount = usedReplicaCount + olapTable.getReplicaCount();
} finally {
olapTable.readUnlock();
}
}
return usedReplicaCount;
} finally {
readUnlock();
}
}
public long getReplicaQuotaLeftWithLock() {
long leftReplicaQuota = replicaQuotaSize - getReplicaCountWithLock();
return Math.max(leftReplicaQuota, 0L);
}
public void checkDataSizeQuota() throws DdlException {
Pair<Double, String> quotaUnitPair = DebugUtil.getByteUint(dataQuotaBytes);
String readableQuota = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(quotaUnitPair.first) + " "
+ quotaUnitPair.second;
long usedDataQuota = getUsedDataQuotaWithLock();
long leftDataQuota = Math.max(dataQuotaBytes - usedDataQuota, 0);
Pair<Double, String> leftQuotaUnitPair = DebugUtil.getByteUint(leftDataQuota);
String readableLeftQuota = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(leftQuotaUnitPair.first) + " "
+ leftQuotaUnitPair.second;
LOG.info("database[{}] data quota: left bytes: {} / total: {}",
fullQualifiedName, readableLeftQuota, readableQuota);
if (leftDataQuota <= 0L) {
throw new DdlException("Database[" + fullQualifiedName
+ "] data size exceeds quota[" + readableQuota + "]");
}
}
public void checkReplicaQuota() throws DdlException {
long leftReplicaQuota = getReplicaQuotaLeftWithLock();
LOG.info("database[{}] replica quota: left number: {} / total: {}",
fullQualifiedName, leftReplicaQuota, replicaQuotaSize);
if (leftReplicaQuota <= 0L) {
throw new DdlException("Database[" + fullQualifiedName
+ "] replica number exceeds quota[" + replicaQuotaSize + "]");
}
}
public void checkQuota() throws DdlException {
checkDataSizeQuota();
checkReplicaQuota();
}
public boolean isTableExist(String tableName) {
if (Env.isTableNamesCaseInsensitive()) {
tableName = lowerCaseToTableName.get(tableName.toLowerCase());
if (tableName == null) {
return false;
}
}
return nameToTable.containsKey(tableName);
}
public Pair<Boolean, Boolean> createTableWithLock(
Table table, boolean isReplay, boolean setIfNotExist) throws DdlException {
boolean result = true;
boolean isTableExist = false;
table.setQualifiedDbName(fullQualifiedName);
writeLockOrDdlException();
try {
String tableName = table.getName();
if (Env.isStoredTableNamesLowerCase()) {
tableName = tableName.toLowerCase();
}
if (isTableExist(tableName)) {
result = setIfNotExist;
isTableExist = true;
} else {
idToTable.put(table.getId(), table);
nameToTable.put(table.getName(), table);
lowerCaseToTableName.put(tableName.toLowerCase(), tableName);
if (!isReplay) {
CreateTableInfo info = new CreateTableInfo(fullQualifiedName, table);
Env.getCurrentEnv().getEditLog().logCreateTable(info);
}
if (table.getType() == TableType.ELASTICSEARCH) {
Env.getCurrentEnv().getEsRepository().registerTable((EsTable) table);
}
}
return Pair.of(result, isTableExist);
} finally {
writeUnlock();
}
}
public boolean createTable(Table table) {
boolean result = true;
table.setQualifiedDbName(fullQualifiedName);
String tableName = table.getName();
if (Env.isStoredTableNamesLowerCase()) {
tableName = tableName.toLowerCase();
}
if (isTableExist(tableName)) {
result = false;
} else {
idToTable.put(table.getId(), table);
nameToTable.put(table.getName(), table);
lowerCaseToTableName.put(tableName.toLowerCase(), tableName);
}
table.unmarkDropped();
return result;
}
public void dropTable(String tableName) {
if (Env.isStoredTableNamesLowerCase()) {
tableName = tableName.toLowerCase();
}
Table table = getTableNullable(tableName);
if (table != null) {
this.nameToTable.remove(tableName);
this.idToTable.remove(table.getId());
this.lowerCaseToTableName.remove(tableName.toLowerCase());
table.markDropped();
}
}
public List<Table> getTables() {
return new ArrayList<>(idToTable.values());
}
public List<Table> getTablesOnIdOrder() {
return idToTable.values().stream()
.sorted(Comparator.comparing(Table::getId))
.collect(Collectors.toList());
}
public List<Table> getViews() {
List<Table> views = new ArrayList<>();
for (Table table : idToTable.values()) {
if (table.getType() == TableType.VIEW) {
views.add(table);
}
}
return views;
}
/**
* This method is used to get the list of existing tables by table id list; tables that do not exist are ignored.
*/
public List<Table> getTablesOnIdOrderIfExist(List<Long> tableIdList) {
List<Table> tableList = Lists.newArrayListWithCapacity(tableIdList.size());
for (Long tableId : tableIdList) {
Table table = idToTable.get(tableId);
if (table != null) {
tableList.add(table);
}
}
if (tableList.size() > 1) {
return tableList.stream().sorted(Comparator.comparing(Table::getId)).collect(Collectors.toList());
}
return tableList;
}
public List<Table> getTablesOnIdOrderOrThrowException(List<Long> tableIdList) throws MetaNotFoundException {
List<Table> tableList = Lists.newArrayListWithCapacity(tableIdList.size());
for (Long tableId : tableIdList) {
Table table = idToTable.get(tableId);
if (table == null) {
throw new MetaNotFoundException("unknown table, tableId=" + tableId);
}
tableList.add(table);
}
if (tableList.size() > 1) {
return tableList.stream().sorted(Comparator.comparing(Table::getId)).collect(Collectors.toList());
}
return tableList;
}
public Set<String> getTableNamesWithLock() {
readLock();
try {
return new HashSet<>(this.nameToTable.keySet());
} finally {
readUnlock();
}
}
/**
* This is a thread-safe method when nameToTable is a concurrent hash map
*/
@Override
public Table getTableNullable(String tableName) {
if (Env.isStoredTableNamesLowerCase()) {
tableName = tableName.toLowerCase();
}
if (Env.isTableNamesCaseInsensitive()) {
tableName = lowerCaseToTableName.get(tableName.toLowerCase());
if (tableName == null) {
return null;
}
}
return nameToTable.get(tableName);
}
/**
* This is a thread-safe method when idToTable is a concurrent hash map
*/
@Override
public Table getTableNullable(long tableId) {
return idToTable.get(tableId);
}
public int getMaxReplicationNum() {
int ret = 0;
readLock();
try {
for (Table table : idToTable.values()) {
if (table.getType() != TableType.OLAP) {
continue;
}
OlapTable olapTable = (OlapTable) table;
table.readLock();
try {
for (Partition partition : olapTable.getAllPartitions()) {
short replicationNum = olapTable.getPartitionInfo()
.getReplicaAllocation(partition.getId()).getTotalReplicaNum();
if (ret < replicationNum) {
ret = replicationNum;
}
}
} finally {
table.readUnlock();
}
}
} finally {
readUnlock();
}
return ret;
}
public static Database read(DataInput in) throws IOException {
Database db = new Database();
db.readFields(in);
return db;
}
@Override
public String getSignature(int signatureVersion) {
StringBuilder sb = new StringBuilder(signatureVersion);
sb.append(fullQualifiedName);
String md5 = DigestUtils.md5Hex(sb.toString());
LOG.debug("get signature of database {}: {}. signature string: {}", fullQualifiedName, md5, sb.toString());
return md5;
}
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
out.writeLong(id);
Text.writeString(out, fullQualifiedName);
int numTables = nameToTable.size();
out.writeInt(numTables);
for (Map.Entry<String, Table> entry : nameToTable.entrySet()) {
entry.getValue().write(out);
}
out.writeLong(dataQuotaBytes);
Text.writeString(out, clusterName);
Text.writeString(out, dbState.name());
Text.writeString(out, attachDbName);
out.writeInt(name2Function.size());
for (Entry<String, ImmutableList<Function>> entry : name2Function.entrySet()) {
Text.writeString(out, entry.getKey());
out.writeInt(entry.getValue().size());
for (Function function : entry.getValue()) {
function.write(out);
}
}
dbEncryptKey.write(out);
out.writeLong(replicaQuotaSize);
dbProperties.write(out);
}
@Override
public void readFields(DataInput in) throws IOException {
super.readFields(in);
id = in.readLong();
fullQualifiedName = Text.readString(in);
int numTables = in.readInt();
for (int i = 0; i < numTables; ++i) {
Table table = Table.read(in);
table.setQualifiedDbName(fullQualifiedName);
String tableName = table.getName();
nameToTable.put(tableName, table);
idToTable.put(table.getId(), table);
lowerCaseToTableName.put(tableName.toLowerCase(), tableName);
}
dataQuotaBytes = in.readLong();
clusterName = Text.readString(in);
dbState = DbState.valueOf(Text.readString(in));
attachDbName = Text.readString(in);
int numEntries = in.readInt();
for (int i = 0; i < numEntries; ++i) {
String name = Text.readString(in);
ImmutableList.Builder<Function> builder = ImmutableList.builder();
int numFunctions = in.readInt();
for (int j = 0; j < numFunctions; ++j) {
builder.add(Function.read(in));
}
name2Function.put(name, builder.build());
}
if (Env.getCurrentEnvJournalVersion() >= FeMetaVersion.VERSION_102) {
dbEncryptKey = DatabaseEncryptKey.read(in);
}
replicaQuotaSize = in.readLong();
if (Env.getCurrentEnvJournalVersion() >= FeMetaVersion.VERSION_105) {
dbProperties = DatabaseProperty.read(in);
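// Restore the transaction quota persisted by setTransactionQuotaSize(); older images without the key fall back to the global per-db default.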
String txnQuotaStr = dbProperties.getOrDefault(TRANSACTION_QUOTA_SIZE,
String.valueOf(Config.max_running_txn_num_per_db));
transactionQuotaSize = Long.parseLong(txnQuotaStr);
} else {
transactionQuotaSize = Config.default_db_max_running_txn_num == -1L
? Config.max_running_txn_num_per_db
: Config.default_db_max_running_txn_num;
}
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof Database)) {
return false;
}
Database other = (Database) obj;
return id == other.id
&& idToTable.equals(other.idToTable)
&& fullQualifiedName.equals(other.fullQualifiedName)
&& dataQuotaBytes == other.dataQuotaBytes;
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public DbState getDbState() {
return dbState;
}
public void setDbState(DbState dbState) {
if (dbState == null) {
return;
}
this.dbState = dbState;
}
public void setAttachDb(String name) {
this.attachDbName = name;
}
public String getAttachDb() {
return this.attachDbName;
}
public void setName(String name) {
this.fullQualifiedName = name;
for (Table table : nameToTable.values()) {
table.setQualifiedDbName(name);
}
}
public synchronized void addFunction(Function function, boolean ifNotExists) throws UserException {
function.checkWritable();
if (addFunctionImpl(function, ifNotExists, false)) {
Env.getCurrentEnv().getEditLog().logAddFunction(function);
}
}
public synchronized void replayAddFunction(Function function) {
try {
addFunctionImpl(function, false, true);
} catch (UserException e) {
throw new RuntimeException(e);
}
}
/**
* @param function the function to add
* @param ifNotExists if true, return quietly when an identical function already exists
* @param isReplay whether this call replays an edit log entry
* @return true if the function was added, false otherwise
* @throws UserException if an identical function already exists and ifNotExists is false
*/
private boolean addFunctionImpl(Function function, boolean ifNotExists, boolean isReplay) throws UserException {
String functionName = function.getFunctionName().getFunction();
List<Function> existFuncs = name2Function.get(functionName);
if (!isReplay) {
if (existFuncs != null) {
for (Function existFunc : existFuncs) {
if (function.compare(existFunc, Function.CompareMode.IS_IDENTICAL)) {
if (ifNotExists) {
LOG.debug("function already exists");
return false;
}
throw new UserException("function already exists");
}
}
}
long functionId = Env.getCurrentEnv().getNextId();
function.setId(functionId);
}
ImmutableList.Builder<Function> builder = ImmutableList.builder();
if (existFuncs != null) {
builder.addAll(existFuncs);
}
builder.add(function);
name2Function.put(functionName, builder.build());
return true;
}
public synchronized void dropFunction(FunctionSearchDesc function, boolean ifExists) throws UserException {
if (dropFunctionImpl(function, ifExists)) {
Env.getCurrentEnv().getEditLog().logDropFunction(function);
}
}
public synchronized void replayDropFunction(FunctionSearchDesc functionSearchDesc) {
try {
dropFunctionImpl(functionSearchDesc, false);
} catch (UserException e) {
throw new RuntimeException(e);
}
}
/**
* @param function the search descriptor of the function to drop
* @param ifExists if true, return quietly when the function does not exist
* @return true if the function was dropped, false otherwise
* @throws UserException if the function does not exist and ifExists is false
*/
private boolean dropFunctionImpl(FunctionSearchDesc function, boolean ifExists) throws UserException {
String functionName = function.getName().getFunction();
List<Function> existFuncs = name2Function.get(functionName);
if (existFuncs == null) {
if (ifExists) {
LOG.debug("function name does not exist: " + functionName);
return false;
}
throw new UserException("function name does not exist: " + functionName);
}
boolean isFound = false;
ImmutableList.Builder<Function> builder = ImmutableList.builder();
for (Function existFunc : existFuncs) {
if (function.isIdentical(existFunc)) {
isFound = true;
} else {
builder.add(existFunc);
}
}
if (!isFound) {
if (ifExists) {
LOG.debug("function does not exist: " + function);
return false;
}
throw new UserException("function does not exist: " + function);
}
ImmutableList<Function> newFunctions = builder.build();
if (newFunctions.isEmpty()) {
name2Function.remove(functionName);
} else {
name2Function.put(functionName, newFunctions);
}
return true;
}
public synchronized Function getFunction(Function desc, Function.CompareMode mode) {
List<Function> fns = name2Function.get(desc.getFunctionName().getFunction());
if (fns == null) {
return null;
}
return Function.getFunction(fns, desc, mode);
}
public synchronized Function getFunction(FunctionSearchDesc function) throws AnalysisException {
String functionName = function.getName().getFunction();
List<Function> existFuncs = name2Function.get(functionName);
if (existFuncs == null) {
throw new AnalysisException("Unknown function, function=" + function.toString());
}
for (Function existFunc : existFuncs) {
if (function.isIdentical(existFunc)) {
return existFunc;
}
}
throw new AnalysisException("Unknown function, function=" + function.toString());
}
public synchronized List<Function> getFunctions() {
List<Function> functions = Lists.newArrayList();
for (Map.Entry<String, ImmutableList<Function>> entry : name2Function.entrySet()) {
functions.addAll(entry.getValue());
}
return functions;
}
public boolean isInfoSchemaDb() {
return ClusterNamespace.getNameFromFullName(fullQualifiedName).equalsIgnoreCase(InfoSchemaDb.DATABASE_NAME);
}
public synchronized void addEncryptKey(EncryptKey encryptKey, boolean ifNotExists) throws UserException {
if (addEncryptKeyImpl(encryptKey, false, ifNotExists)) {
Env.getCurrentEnv().getEditLog().logAddEncryptKey(encryptKey);
}
}
public synchronized void replayAddEncryptKey(EncryptKey encryptKey) {
try {
addEncryptKeyImpl(encryptKey, true, true);
} catch (UserException e) {
Preconditions.checkArgument(false);
}
}
private boolean addEncryptKeyImpl(EncryptKey encryptKey, boolean isReplay, boolean ifNotExists)
throws UserException {
String keyName = encryptKey.getEncryptKeyName().getKeyName();
EncryptKey existKey = dbEncryptKey.getName2EncryptKey().get(keyName);
if (!isReplay) {
if (existKey != null) {
if (existKey.isIdentical(encryptKey)) {
if (ifNotExists) {
return false;
}
throw new UserException("encryptKey ["
+ existKey.getEncryptKeyName().toString() + "] already exists");
}
}
}
dbEncryptKey.getName2EncryptKey().put(keyName, encryptKey);
return true;
}
public synchronized void dropEncryptKey(EncryptKeySearchDesc encryptKeySearchDesc, boolean ifExists)
throws UserException {
if (dropEncryptKeyImpl(encryptKeySearchDesc, ifExists)) {
Env.getCurrentEnv().getEditLog().logDropEncryptKey(encryptKeySearchDesc);
}
}
public synchronized void replayDropEncryptKey(EncryptKeySearchDesc encryptKeySearchDesc) {
try {
dropEncryptKeyImpl(encryptKeySearchDesc, true);
} catch (UserException e) {
Preconditions.checkArgument(false);
}
}
private boolean dropEncryptKeyImpl(EncryptKeySearchDesc encryptKeySearchDesc, boolean ifExists)
throws UserException {
String keyName = encryptKeySearchDesc.getKeyEncryptKeyName().getKeyName();
EncryptKey existKey = dbEncryptKey.getName2EncryptKey().get(keyName);
if (existKey == null) {
if (ifExists) {
return false;
}
throw new UserException("Unknown encryptKey, encryptKey=" + encryptKeySearchDesc.toString());
}
boolean isFound = false;
if (encryptKeySearchDesc.isIdentical(existKey)) {
isFound = true;
}
if (!isFound) {
if (ifExists) {
return false;
}
throw new UserException("Unknown encryptKey, encryptKey=" + encryptKeySearchDesc.toString());
}
dbEncryptKey.getName2EncryptKey().remove(keyName);
return true;
}
public synchronized List<EncryptKey> getEncryptKeys() {
List<EncryptKey> encryptKeys = Lists.newArrayList();
for (Map.Entry<String, EncryptKey> entry : dbEncryptKey.getName2EncryptKey().entrySet()) {
encryptKeys.add(entry.getValue());
}
return encryptKeys;
}
public synchronized EncryptKey getEncryptKey(String keyName) {
if (dbEncryptKey.getName2EncryptKey().containsKey(keyName)) {
return dbEncryptKey.getName2EncryptKey().get(keyName);
}
return null;
}
} |
Yes because the ID field name is always `_id` in MongoDB so it's easy to implement ;) | static String bindQuery(Class<?> clazz, String query, Object[] params) {
String bindQuery = null;
if (query.charAt(0) == '{') {
bindQuery = NativeQueryBinder.bindQuery(query, params);
} else {
bindQuery = PanacheQlQueryBinder.bindQuery(clazz, query, params);
}
LOGGER.debug(bindQuery);
return bindQuery;
}
/**
* We should have a query like <code>{'firstname': :firstname, 'lastname': :lastname}</code> for a native query,
* and one like <code>firstname = :firstname and lastname = :lastname</code> for a PanacheQL query.
*/
static String bindQuery(Class<?> clazz, String query, Map<String, Object> params) {
String bindQuery = null;
if (query.charAt(0) == '{') {
bindQuery = NativeQueryBinder.bindQuery(query, params);
} else {
bindQuery = PanacheQlQueryBinder.bindQuery(clazz, query, params);
}
LOGGER.debug(bindQuery);
return bindQuery;
}
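// Illustrative sketch only (not part of the original class): shows the two query styles the
// binders above accept, using the static helpers defined in this class. The field names mirror
// the Javadoc example; the entity class passed in is arbitrary.
static Uni<List<?>> exampleBindUsage(Class<?> entityClass) {
// Native MongoDB query: starts with '{' and is bound by NativeQueryBinder.
list(entityClass, "{'firstname': :firstname, 'lastname': :lastname}",
Parameters.with("firstname", "John").and("lastname", "Doe"));
// PanacheQL query: translated into a MongoDB query by PanacheQlQueryBinder.
return list(entityClass, "firstname = :firstname and lastname = :lastname",
Parameters.with("firstname", "John").and("lastname", "Doe"));
}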
public static ReactivePanacheQuery<?> find(Class<?> entityClass, String query, Map<String, Object> params) {
return find(entityClass, query, null, params);
}
@SuppressWarnings("rawtypes")
public static ReactivePanacheQuery<?> find(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) {
String bindQuery = bindQuery(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
Document docSort = sortToDocument(sort);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return new ReactivePanacheQueryImpl(collection, entityClass, docQuery, docSort);
}
public static ReactivePanacheQuery<?> find(Class<?> entityClass, String query, Parameters params) {
return find(entityClass, query, null, params.map());
}
public static ReactivePanacheQuery<?> find(Class<?> entityClass, String query, Sort sort, Parameters params) {
return find(entityClass, query, sort, params.map());
}
@SuppressWarnings("rawtypes")
public static ReactivePanacheQuery<?> find(Class<?> entityClass, Document query, Sort sort) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
Document sortDoc = sortToDocument(sort);
return new ReactivePanacheQueryImpl(collection, entityClass, query, sortDoc);
}
public static ReactivePanacheQuery<?> find(Class<?> entityClass, Document query, Document sort) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return new ReactivePanacheQueryImpl(collection, entityClass, query, sort);
}
public static ReactivePanacheQuery<?> find(Class<?> entityClass, Document query) {
return find(entityClass, query, (Document) null);
}
public static Uni<List<?>> list(Class<?> entityClass, String query, Object... params) {
return (Uni) find(entityClass, query, params).list();
}
public static Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Object... params) {
return (Uni) find(entityClass, query, sort, params).list();
}
public static Uni<List<?>> list(Class<?> entityClass, String query, Map<String, Object> params) {
return (Uni) find(entityClass, query, params).list();
}
public static Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) {
return (Uni) find(entityClass, query, sort, params).list();
}
public static Uni<List<?>> list(Class<?> entityClass, String query, Parameters params) {
return (Uni) find(entityClass, query, params).list();
}
public static Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Parameters params) {
return (Uni) find(entityClass, query, sort, params).list();
}
public static Uni<List<?>> list(Class<?> entityClass, Document query) {
return (Uni) find(entityClass, query).list();
}
public static Uni<List<?>> list(Class<?> entityClass, Document query, Document sort) {
return (Uni) find(entityClass, query, sort).list();
}
public static Multi<?> stream(Class<?> entityClass, String query, Object... params) {
return find(entityClass, query, params).stream();
}
public static Multi<?> stream(Class<?> entityClass, String query, Sort sort, Object... params) {
return find(entityClass, query, sort, params).stream();
}
public static Multi<?> stream(Class<?> entityClass, String query, Map<String, Object> params) {
return find(entityClass, query, params).stream();
}
public static Multi<?> stream(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) {
return find(entityClass, query, sort, params).stream();
}
public static Multi<?> stream(Class<?> entityClass, String query, Parameters params) {
return find(entityClass, query, params).stream();
}
public static Multi<?> stream(Class<?> entityClass, String query, Sort sort, Parameters params) {
return find(entityClass, query, sort, params).stream();
}
public static Multi<?> stream(Class<?> entityClass, Document query) {
return find(entityClass, query).stream();
}
public static Multi<?> stream(Class<?> entityClass, Document query, Document sort) {
return find(entityClass, query, sort).stream();
}
@SuppressWarnings("rawtypes")
public static ReactivePanacheQuery<?> findAll(Class<?> entityClass) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return new ReactivePanacheQueryImpl(collection, entityClass, null, null);
}
@SuppressWarnings("rawtypes")
public static ReactivePanacheQuery<?> findAll(Class<?> entityClass, Sort sort) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
Document sortDoc = sortToDocument(sort);
return new ReactivePanacheQueryImpl(collection, entityClass, null, sortDoc);
}
private static Document sortToDocument(Sort sort) {
if (sort == null) {
return null;
}
Document sortDoc = new Document();
for (Sort.Column col : sort.getColumns()) {
sortDoc.append(col.getName(), col.getDirection() == Sort.Direction.Ascending ? 1 : -1);
}
return sortDoc;
}
public static Uni<List<?>> listAll(Class<?> entityClass) {
return (Uni) findAll(entityClass).list();
}
public static Uni<List<?>> listAll(Class<?> entityClass, Sort sort) {
return (Uni) findAll(entityClass, sort).list();
}
public static Multi<?> streamAll(Class<?> entityClass) {
return findAll(entityClass).stream();
}
public static Multi<?> streamAll(Class<?> entityClass, Sort sort) {
return findAll(entityClass, sort).stream();
}
public static Uni<Long> count(Class<?> entityClass) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.countDocuments();
}
public static Uni<Long> count(Class<?> entityClass, String query, Object... params) {
String bindQuery = bindQuery(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.countDocuments(docQuery);
}
public static Uni<Long> count(Class<?> entityClass, String query, Map<String, Object> params) {
String bindQuery = bindQuery(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.countDocuments(docQuery);
}
public static Uni<Long> count(Class<?> entityClass, String query, Parameters params) {
return count(entityClass, query, params.map());
}
public static Uni<Long> count(Class<?> entityClass, Document query) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.countDocuments(query);
}
public static Uni<Long> deleteAll(Class<?> entityClass) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(new Document()).map(deleteResult -> deleteResult.getDeletedCount());
}
public static Uni<Boolean> deleteById(Class<?> entityClass, Object id) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
Document query = new Document().append(ID, id);
return collection.deleteOne(query).map(results -> results.getDeletedCount() == 1);
}
public static Uni<Long> delete(Class<?> entityClass, String query, Object... params) {
String bindQuery = bindQuery(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(docQuery).map(deleteResult -> deleteResult.getDeletedCount());
}
public static Uni<Long> delete(Class<?> entityClass, String query, Map<String, Object> params) {
String bindQuery = bindQuery(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(docQuery).map(deleteResult -> deleteResult.getDeletedCount());
}
public static Uni<Long> delete(Class<?> entityClass, String query, Parameters params) {
return delete(entityClass, query, params.map());
}
public static Uni<Long> delete(Class<?> entityClass, Document query) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(query).map(deleteResult -> deleteResult.getDeletedCount());
}
public static IllegalStateException implementationInjectionMissing() {
return new IllegalStateException(
"This method is normally automatically overridden in subclasses");
}
} | return collection.deleteOne(query).map(results -> results.getDeletedCount() == 1); | static String bindQuery(Class<?> clazz, String query, Object[] params) {
String bindQuery = null;
if (query.charAt(0) == '{') {
bindQuery = NativeQueryBinder.bindQuery(query, params);
} else {
bindQuery = PanacheQlQueryBinder.bindQuery(clazz, query, params);
}
LOGGER.debug(bindQuery);
return bindQuery;
}
/**
* We should have a query like <code>{'firstname': :firstname, 'lastname': :lastname}</code> for native one
* and like <code>firstname = :firstname and lastname = :lastname</code> for PanacheQL one.
*/
static String bindQuery(Class<?> clazz, String query, Map<String, Object> params) {
String bindQuery = null;
if (query.charAt(0) == '{') {
bindQuery = NativeQueryBinder.bindQuery(query, params);
} else {
bindQuery = PanacheQlQueryBinder.bindQuery(clazz, query, params);
}
LOGGER.debug(bindQuery);
return bindQuery;
}
public static ReactivePanacheQuery<?> find(Class<?> entityClass, String query, Map<String, Object> params) {
return find(entityClass, query, null, params);
}
@SuppressWarnings("rawtypes")
public static ReactivePanacheQuery<?> find(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) {
String bindQuery = bindQuery(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
Document docSort = sortToDocument(sort);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return new ReactivePanacheQueryImpl(collection, entityClass, docQuery, docSort);
}
public static ReactivePanacheQuery<?> find(Class<?> entityClass, String query, Parameters params) {
return find(entityClass, query, null, params.map());
}
public static ReactivePanacheQuery<?> find(Class<?> entityClass, String query, Sort sort, Parameters params) {
return find(entityClass, query, sort, params.map());
}
@SuppressWarnings("rawtypes")
public static ReactivePanacheQuery<?> find(Class<?> entityClass, Document query, Sort sort) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
Document sortDoc = sortToDocument(sort);
return new ReactivePanacheQueryImpl(collection, entityClass, query, sortDoc);
}
public static ReactivePanacheQuery<?> find(Class<?> entityClass, Document query, Document sort) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return new ReactivePanacheQueryImpl(collection, entityClass, query, sort);
}
public static ReactivePanacheQuery<?> find(Class<?> entityClass, Document query) {
return find(entityClass, query, (Document) null);
}
public static Uni<List<?>> list(Class<?> entityClass, String query, Object... params) {
return (Uni) find(entityClass, query, params).list();
}
public static Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Object... params) {
return (Uni) find(entityClass, query, sort, params).list();
}
public static Uni<List<?>> list(Class<?> entityClass, String query, Map<String, Object> params) {
return (Uni) find(entityClass, query, params).list();
}
public static Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) {
return (Uni) find(entityClass, query, sort, params).list();
}
public static Uni<List<?>> list(Class<?> entityClass, String query, Parameters params) {
return (Uni) find(entityClass, query, params).list();
}
public static Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Parameters params) {
return (Uni) find(entityClass, query, sort, params).list();
}
public static Uni<List<?>> list(Class<?> entityClass, Document query) {
return (Uni) find(entityClass, query).list();
}
public static Uni<List<?>> list(Class<?> entityClass, Document query, Document sort) {
return (Uni) find(entityClass, query, sort).list();
}
public static Multi<?> stream(Class<?> entityClass, String query, Object... params) {
return find(entityClass, query, params).stream();
}
public static Multi<?> stream(Class<?> entityClass, String query, Sort sort, Object... params) {
return find(entityClass, query, sort, params).stream();
}
public static Multi<?> stream(Class<?> entityClass, String query, Map<String, Object> params) {
return find(entityClass, query, params).stream();
}
public static Multi<?> stream(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) {
return find(entityClass, query, sort, params).stream();
}
public static Multi<?> stream(Class<?> entityClass, String query, Parameters params) {
return find(entityClass, query, params).stream();
}
public static Multi<?> stream(Class<?> entityClass, String query, Sort sort, Parameters params) {
return find(entityClass, query, sort, params).stream();
}
public static Multi<?> stream(Class<?> entityClass, Document query) {
return find(entityClass, query).stream();
}
public static Multi<?> stream(Class<?> entityClass, Document query, Document sort) {
return find(entityClass, query, sort).stream();
}
@SuppressWarnings("rawtypes")
public static ReactivePanacheQuery<?> findAll(Class<?> entityClass) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return new ReactivePanacheQueryImpl(collection, entityClass, null, null);
}
@SuppressWarnings("rawtypes")
public static ReactivePanacheQuery<?> findAll(Class<?> entityClass, Sort sort) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
Document sortDoc = sortToDocument(sort);
return new ReactivePanacheQueryImpl(collection, entityClass, null, sortDoc);
}
private static Document sortToDocument(Sort sort) {
if (sort == null) {
return null;
}
Document sortDoc = new Document();
for (Sort.Column col : sort.getColumns()) {
sortDoc.append(col.getName(), col.getDirection() == Sort.Direction.Ascending ? 1 : -1);
}
return sortDoc;
}
public static Uni<List<?>> listAll(Class<?> entityClass) {
return (Uni) findAll(entityClass).list();
}
public static Uni<List<?>> listAll(Class<?> entityClass, Sort sort) {
return (Uni) findAll(entityClass, sort).list();
}
public static Multi<?> streamAll(Class<?> entityClass) {
return findAll(entityClass).stream();
}
public static Multi<?> streamAll(Class<?> entityClass, Sort sort) {
return findAll(entityClass, sort).stream();
}
public static Uni<Long> count(Class<?> entityClass) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.countDocuments();
}
public static Uni<Long> count(Class<?> entityClass, String query, Object... params) {
String bindQuery = bindQuery(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.countDocuments(docQuery);
}
public static Uni<Long> count(Class<?> entityClass, String query, Map<String, Object> params) {
String bindQuery = bindQuery(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.countDocuments(docQuery);
}
public static Uni<Long> count(Class<?> entityClass, String query, Parameters params) {
return count(entityClass, query, params.map());
}
public static Uni<Long> count(Class<?> entityClass, Document query) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.countDocuments(query);
}
public static Uni<Long> deleteAll(Class<?> entityClass) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(new Document()).map(deleteResult -> deleteResult.getDeletedCount());
}
public static Uni<Boolean> deleteById(Class<?> entityClass, Object id) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
Document query = new Document().append(ID, id);
return collection.deleteOne(query).map(results -> results.getDeletedCount() == 1);
}
public static Uni<Long> delete(Class<?> entityClass, String query, Object... params) {
String bindQuery = bindQuery(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(docQuery).map(deleteResult -> deleteResult.getDeletedCount());
}
public static Uni<Long> delete(Class<?> entityClass, String query, Map<String, Object> params) {
String bindQuery = bindQuery(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(docQuery).map(deleteResult -> deleteResult.getDeletedCount());
}
public static Uni<Long> delete(Class<?> entityClass, String query, Parameters params) {
return delete(entityClass, query, params.map());
}
public static Uni<Long> delete(Class<?> entityClass, Document query) {
ReactiveMongoCollection<?> collection = mongoCollection(entityClass);
return collection.deleteMany(query).map(deleteResult -> deleteResult.getDeletedCount());
}
public static IllegalStateException implementationInjectionMissing() {
return new IllegalStateException(
"This method is normally automatically overridden in subclasses");
}
} | class ReactiveMongoOperations {
private static final Logger LOGGER = Logger.getLogger(ReactiveMongoOperations.class);
public static final String ID = "_id";
public static final String MONGODB_DATABASE = "quarkus.mongodb.database";
public static Uni<Void> persist(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
return persist(collection, entity);
}
public static Uni<Void> persist(Iterable<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = new ArrayList<>();
for (Object entity : entities) {
objects.add(entity);
}
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return persist(collection, objects);
}
return nullUni();
});
}
public static Uni<Void> persist(Object firstEntity, Object... entities) {
ReactiveMongoCollection collection = mongoCollection(firstEntity);
if (entities == null || entities.length == 0) {
return persist(collection, firstEntity);
} else {
List<Object> entityList = new ArrayList<>();
entityList.add(firstEntity);
entityList.addAll(Arrays.asList(entities));
return persist(collection, entityList);
}
}
public static Uni<Void> persist(Stream<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = entities.collect(Collectors.toList());
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return persist(collection, objects);
}
return nullUni();
});
}
public static Uni<Void> update(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
return update(collection, entity);
}
public static Uni<Void> update(Iterable<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = new ArrayList<>();
for (Object entity : entities) {
objects.add(entity);
}
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return update(collection, objects);
}
return nullUni();
});
}
public static Uni<Void> update(Object firstEntity, Object... entities) {
ReactiveMongoCollection collection = mongoCollection(firstEntity);
if (entities == null || entities.length == 0) {
return update(collection, firstEntity);
} else {
List<Object> entityList = new ArrayList<>();
entityList.add(firstEntity);
entityList.addAll(Arrays.asList(entities));
return update(collection, entityList);
}
}
public static Uni<Void> update(Stream<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = entities.collect(Collectors.toList());
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return update(collection, objects);
}
return nullUni();
});
}
public static Uni<Void> persistOrUpdate(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
return persistOrUpdate(collection, entity);
}
public static Uni<Void> persistOrUpdate(Iterable<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = new ArrayList<>();
for (Object entity : entities) {
objects.add(entity);
}
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return persistOrUpdate(collection, objects);
}
return nullUni();
});
}
public static Uni<Void> persistOrUpdate(Object firstEntity, Object... entities) {
ReactiveMongoCollection collection = mongoCollection(firstEntity);
if (entities == null || entities.length == 0) {
return persistOrUpdate(collection, firstEntity);
} else {
List<Object> entityList = new ArrayList<>();
entityList.add(firstEntity);
entityList.addAll(Arrays.asList(entities));
return persistOrUpdate(collection, entityList);
}
}
public static Uni<Void> persistOrUpdate(Stream<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = entities.collect(Collectors.toList());
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return persistOrUpdate(collection, objects);
}
return nullUni();
});
}
public static Uni<Void> delete(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
BsonDocument query = new BsonDocument().append(ID, id);
return collection.deleteOne(query).onItem().ignore().andContinueWithNull();
}
public static ReactiveMongoCollection mongoCollection(Class<?> entityClass) {
MongoEntity mongoEntity = entityClass.getAnnotation(MongoEntity.class);
ReactiveMongoDatabase database = mongoDatabase(mongoEntity);
if (mongoEntity != null && !mongoEntity.collection().isEmpty()) {
return database.getCollection(mongoEntity.collection(), entityClass);
}
return database.getCollection(entityClass.getSimpleName(), entityClass);
}
public static ReactiveMongoDatabase mongoDatabase(Class<?> entityClass) {
MongoEntity mongoEntity = entityClass.getAnnotation(MongoEntity.class);
return mongoDatabase(mongoEntity);
}
public static Uni<Void> nullUni() {
return Uni.createFrom().item((Void) null);
}
private static Uni<Void> persist(ReactiveMongoCollection collection, Object entity) {
return collection.insertOne(entity);
}
private static Uni<Void> persist(ReactiveMongoCollection collection, List<Object> entities) {
return collection.insertMany(entities);
}
private static Uni<Void> update(ReactiveMongoCollection collection, Object entity) {
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
BsonDocument query = new BsonDocument().append(ID, id);
return collection.replaceOne(query, entity).onItem().ignore().andContinueWithNull();
}
private static Uni<Void> update(ReactiveMongoCollection collection, List<Object> entities) {
Uni<Void> ret = nullUni();
for (Object entity : entities) {
ret.and(update(collection, entity));
}
return ret.onItem().ignore().andContinueWithNull();
}
private static Uni<Void> persistOrUpdate(ReactiveMongoCollection collection, Object entity) {
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
if (id == null) {
return collection.insertOne(entity);
} else {
BsonDocument query = new BsonDocument().append(ID, id);
return collection.replaceOne(query, entity, ReplaceOptions.createReplaceOptions(new UpdateOptions().upsert(true)))
.onItem().ignore().andContinueWithNull();
}
}
private static Uni<Void> persistOrUpdate(ReactiveMongoCollection collection, List<Object> entities) {
List<WriteModel> bulk = new ArrayList<>();
for (Object entity : entities) {
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
if (id == null) {
bulk.add(new InsertOneModel(entity));
} else {
BsonDocument query = new BsonDocument().append(ID, id);
bulk.add(new ReplaceOneModel(query, entity,
ReplaceOptions.createReplaceOptions(new UpdateOptions().upsert(true))));
}
}
return collection.bulkWrite(bulk).onItem().ignore().andContinueWithNull();
}
private static BsonDocument getBsonDocument(ReactiveMongoCollection collection, Object entity) {
BsonDocument document = new BsonDocument();
Codec codec = collection.getCodecRegistry().get(entity.getClass());
codec.encode(new BsonDocumentWriter(document), entity, EncoderContext.builder().build());
return document;
}
private static ReactiveMongoCollection mongoCollection(Object entity) {
Class<?> entityClass = entity.getClass();
return mongoCollection(entityClass);
}
private static ReactiveMongoDatabase mongoDatabase(MongoEntity entity) {
ReactiveMongoClient mongoClient = Arc.container().instance(ReactiveMongoClient.class).get();
if (entity != null && !entity.database().isEmpty()) {
return mongoClient.getDatabase(entity.database());
}
String databaseName = ConfigProvider.getConfig()
.getValue(MONGODB_DATABASE, String.class);
return mongoClient.getDatabase(databaseName);
}
public static Uni<Object> findById(Class<?> entityClass, Object id) {
Uni<Optional> optionalEntity = findByIdOptional(entityClass, id);
return optionalEntity.onItem().apply(optional -> optional.orElse(null));
}
public static Uni<Optional> findByIdOptional(Class<?> entityClass, Object id) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.find(new Document(ID, id)).collectItems().first()
.onItem().apply(Optional::ofNullable);
}
public static ReactivePanacheQuery<?> find(Class<?> entityClass, String query, Object... params) {
return find(entityClass, query, null, params);
}
@SuppressWarnings("rawtypes")
public static ReactivePanacheQuery<?> find(Class<?> entityClass, String query, Sort sort, Object... params) {
String bindQuery = bindQuery(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
Document docSort = sortToDocument(sort);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return new ReactivePanacheQueryImpl(collection, entityClass, docQuery, docSort);
}
/**
* We should have a query like <code>{'firstname': ?1, 'lastname': ?2}</code> for native one
* and like <code>firstname = ?1</code> for PanacheQL one.
*/ | class ReactiveMongoOperations {
private static final Logger LOGGER = Logger.getLogger(ReactiveMongoOperations.class);
public static final String ID = "_id";
public static final String MONGODB_DATABASE = "quarkus.mongodb.database";
public static Uni<Void> persist(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
return persist(collection, entity);
}
public static Uni<Void> persist(Iterable<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = new ArrayList<>();
for (Object entity : entities) {
objects.add(entity);
}
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return persist(collection, objects);
}
return nullUni();
});
}
public static Uni<Void> persist(Object firstEntity, Object... entities) {
ReactiveMongoCollection collection = mongoCollection(firstEntity);
if (entities == null || entities.length == 0) {
return persist(collection, firstEntity);
} else {
List<Object> entityList = new ArrayList<>();
entityList.add(firstEntity);
entityList.addAll(Arrays.asList(entities));
return persist(collection, entityList);
}
}
public static Uni<Void> persist(Stream<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = entities.collect(Collectors.toList());
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return persist(collection, objects);
}
return nullUni();
});
}
public static Uni<Void> update(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
return update(collection, entity);
}
public static Uni<Void> update(Iterable<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = new ArrayList<>();
for (Object entity : entities) {
objects.add(entity);
}
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return update(collection, objects);
}
return nullUni();
});
}
public static Uni<Void> update(Object firstEntity, Object... entities) {
ReactiveMongoCollection collection = mongoCollection(firstEntity);
if (entities == null || entities.length == 0) {
return update(collection, firstEntity);
} else {
List<Object> entityList = new ArrayList<>();
entityList.add(firstEntity);
entityList.addAll(Arrays.asList(entities));
return update(collection, entityList);
}
}
public static Uni<Void> update(Stream<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = entities.collect(Collectors.toList());
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return update(collection, objects);
}
return nullUni();
});
}
public static Uni<Void> persistOrUpdate(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
return persistOrUpdate(collection, entity);
}
public static Uni<Void> persistOrUpdate(Iterable<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = new ArrayList<>();
for (Object entity : entities) {
objects.add(entity);
}
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return persistOrUpdate(collection, objects);
}
return nullUni();
});
}
public static Uni<Void> persistOrUpdate(Object firstEntity, Object... entities) {
ReactiveMongoCollection collection = mongoCollection(firstEntity);
if (entities == null || entities.length == 0) {
return persistOrUpdate(collection, firstEntity);
} else {
List<Object> entityList = new ArrayList<>();
entityList.add(firstEntity);
entityList.addAll(Arrays.asList(entities));
return persistOrUpdate(collection, entityList);
}
}
public static Uni<Void> persistOrUpdate(Stream<?> entities) {
return Uni.createFrom().deferred(() -> {
List<Object> objects = entities.collect(Collectors.toList());
if (objects.size() > 0) {
Object firstEntity = objects.get(0);
ReactiveMongoCollection collection = mongoCollection(firstEntity);
return persistOrUpdate(collection, objects);
}
return nullUni();
});
}
public static Uni<Void> delete(Object entity) {
ReactiveMongoCollection collection = mongoCollection(entity);
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
BsonDocument query = new BsonDocument().append(ID, id);
return collection.deleteOne(query).onItem().ignore().andContinueWithNull();
}
public static ReactiveMongoCollection mongoCollection(Class<?> entityClass) {
MongoEntity mongoEntity = entityClass.getAnnotation(MongoEntity.class);
ReactiveMongoDatabase database = mongoDatabase(mongoEntity);
if (mongoEntity != null && !mongoEntity.collection().isEmpty()) {
return database.getCollection(mongoEntity.collection(), entityClass);
}
return database.getCollection(entityClass.getSimpleName(), entityClass);
}
public static ReactiveMongoDatabase mongoDatabase(Class<?> entityClass) {
MongoEntity mongoEntity = entityClass.getAnnotation(MongoEntity.class);
return mongoDatabase(mongoEntity);
}
public static Uni<Void> nullUni() {
return Uni.createFrom().item((Void) null);
}
private static Uni<Void> persist(ReactiveMongoCollection collection, Object entity) {
return collection.insertOne(entity);
}
private static Uni<Void> persist(ReactiveMongoCollection collection, List<Object> entities) {
return collection.insertMany(entities);
}
private static Uni<Void> update(ReactiveMongoCollection collection, Object entity) {
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
BsonDocument query = new BsonDocument().append(ID, id);
return collection.replaceOne(query, entity).onItem().ignore().andContinueWithNull();
}
private static Uni<Void> update(ReactiveMongoCollection collection, List<Object> entities) {
Uni<Void> ret = nullUni();
for (Object entity : entities) {
ret.and(update(collection, entity));
}
return ret.onItem().ignore().andContinueWithNull();
}
private static Uni<Void> persistOrUpdate(ReactiveMongoCollection collection, Object entity) {
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
if (id == null) {
return collection.insertOne(entity);
} else {
BsonDocument query = new BsonDocument().append(ID, id);
return collection.replaceOne(query, entity, ReplaceOptions.createReplaceOptions(new UpdateOptions().upsert(true)))
.onItem().ignore().andContinueWithNull();
}
}
private static Uni<Void> persistOrUpdate(ReactiveMongoCollection collection, List<Object> entities) {
List<WriteModel> bulk = new ArrayList<>();
for (Object entity : entities) {
BsonDocument document = getBsonDocument(collection, entity);
BsonValue id = document.get(ID);
if (id == null) {
bulk.add(new InsertOneModel(entity));
} else {
BsonDocument query = new BsonDocument().append(ID, id);
bulk.add(new ReplaceOneModel(query, entity,
ReplaceOptions.createReplaceOptions(new UpdateOptions().upsert(true))));
}
}
return collection.bulkWrite(bulk).onItem().ignore().andContinueWithNull();
}
private static BsonDocument getBsonDocument(ReactiveMongoCollection collection, Object entity) {
BsonDocument document = new BsonDocument();
Codec codec = collection.getCodecRegistry().get(entity.getClass());
codec.encode(new BsonDocumentWriter(document), entity, EncoderContext.builder().build());
return document;
}
private static ReactiveMongoCollection mongoCollection(Object entity) {
Class<?> entityClass = entity.getClass();
return mongoCollection(entityClass);
}
private static ReactiveMongoDatabase mongoDatabase(MongoEntity entity) {
ReactiveMongoClient mongoClient = Arc.container().instance(ReactiveMongoClient.class).get();
if (entity != null && !entity.database().isEmpty()) {
return mongoClient.getDatabase(entity.database());
}
String databaseName = ConfigProvider.getConfig()
.getValue(MONGODB_DATABASE, String.class);
return mongoClient.getDatabase(databaseName);
}
public static Uni<Object> findById(Class<?> entityClass, Object id) {
Uni<Optional> optionalEntity = findByIdOptional(entityClass, id);
return optionalEntity.onItem().apply(optional -> optional.orElse(null));
}
public static Uni<Optional> findByIdOptional(Class<?> entityClass, Object id) {
ReactiveMongoCollection collection = mongoCollection(entityClass);
return collection.find(new Document(ID, id)).collectItems().first()
.onItem().apply(Optional::ofNullable);
}
public static ReactivePanacheQuery<?> find(Class<?> entityClass, String query, Object... params) {
return find(entityClass, query, null, params);
}
@SuppressWarnings("rawtypes")
public static ReactivePanacheQuery<?> find(Class<?> entityClass, String query, Sort sort, Object... params) {
String bindQuery = bindQuery(entityClass, query, params);
Document docQuery = Document.parse(bindQuery);
Document docSort = sortToDocument(sort);
ReactiveMongoCollection collection = mongoCollection(entityClass);
return new ReactivePanacheQueryImpl(collection, entityClass, docQuery, docSort);
}
/**
* We should have a query like <code>{'firstname': ?1, 'lastname': ?2}</code> for native one
* and like <code>firstname = ?1</code> for PanacheQL one.
*/ |
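The review note on this row relies on MongoDB always storing an entity's identifier under the reserved `_id` field, which is what lets a generic deleteById build its filter without knowing the entity's ID property name. A minimal sketch of that idea against the plain MongoDB sync driver follows; the connection string, database and collection names are placeholders, not values taken from the row.

```java
// Minimal illustration of deleteById targeting the reserved "_id" field.
// Assumes org.mongodb:mongodb-driver-sync is on the classpath and a local MongoDB is running.
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import org.bson.Document;

public class DeleteByIdSketch {

    static boolean deleteById(MongoCollection<Document> collection, Object id) {
        Document query = new Document("_id", id); // works for any entity, the field name never changes
        return collection.deleteOne(query).getDeletedCount() == 1;
    }

    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> persons = client.getDatabase("test").getCollection("Person");
            System.out.println(deleteById(persons, 42L));
        }
    }
}
```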
> Works only if the `generation.halt-on-error` is set to true. See `application-invalid-multiline-test.properties` and the associated test. I am still thinking about this. What I meant to say is, the message will be displayed when ``` quarkus.hibernate-orm.database.generation.halt-on-error=true ``` config is set to true (the only way to intercept the exception). Otherwise whatever went wrong regarding the load script file will be logged and the application starts successfully (normal `ExceptionHandler` behaviour) but without our descriptive message related to multiline. | private PersistenceException persistenceException(String message, Exception cause) {
Throwable t = cause;
while (t != null) {
if (t instanceof NoSuchAlgorithmException) {
message += "Unable to enable SSL support. You might be in the case where you used the `quarkus.ssl.native=false` configuration"
+ " and SSL was not disabled automatically for your driver.";
break;
}
if (t instanceof CommandAcceptanceException) {
message = "Invalid import file. Make sure your statements are valid and properly separated by a semi-colon.";
break;
}
t = t.getCause();
}
return new PersistenceException(getExceptionHeader() + message, cause);
} | } | private PersistenceException persistenceException(String message, Exception cause) {
Throwable t = cause;
while (t != null) {
if (t instanceof NoSuchAlgorithmException) {
message += "Unable to enable SSL support. You might be in the case where you used the `quarkus.ssl.native=false` configuration"
+ " and SSL was not disabled automatically for your driver.";
break;
}
if (t instanceof CommandAcceptanceException) {
message = "Invalid import file. Make sure your statements are valid and properly separated by a semi-colon.";
break;
}
t = t.getCause();
}
return new PersistenceException(getExceptionHeader() + message, cause);
} | class FastBootEntityManagerFactoryBuilder implements EntityManagerFactoryBuilder {
private final MetadataImplementor metadata;
private final String persistenceUnitName;
private final StandardServiceRegistry standardServiceRegistry;
private final RuntimeSettings runtimeSettings;
private final Object validatorFactory;
private final Object cdiBeanManager;
public FastBootEntityManagerFactoryBuilder(MetadataImplementor metadata, String persistenceUnitName,
StandardServiceRegistry standardServiceRegistry, RuntimeSettings runtimeSettings, Object validatorFactory,
Object cdiBeanManager) {
this.metadata = metadata;
this.persistenceUnitName = persistenceUnitName;
this.standardServiceRegistry = standardServiceRegistry;
this.runtimeSettings = runtimeSettings;
this.validatorFactory = validatorFactory;
this.cdiBeanManager = cdiBeanManager;
}
@Override
public EntityManagerFactoryBuilder withValidatorFactory(Object validatorFactory) {
return null;
}
@Override
public EntityManagerFactoryBuilder withDataSource(DataSource dataSource) {
return null;
}
@Override
public EntityManagerFactory build() {
SessionFactoryBuilder sfBuilder = metadata.getSessionFactoryBuilder();
populate(sfBuilder, standardServiceRegistry);
try {
return sfBuilder.build();
} catch (Exception e) {
throw persistenceException("Unable to build Hibernate SessionFactory", e);
}
}
@Override
public void cancel() {
}
@Override
public void generateSchema() {
try {
SessionFactoryBuilder sfBuilder = metadata.getSessionFactoryBuilder();
populate(sfBuilder, standardServiceRegistry);
SchemaManagementToolCoordinator.process(metadata, standardServiceRegistry, runtimeSettings.getSettings(),
DelayedDropRegistryNotAvailableImpl.INSTANCE);
} catch (Exception e) {
throw persistenceException("Error performing schema management", e);
}
cancel();
}
private String getExceptionHeader() {
return "[PersistenceUnit: " + persistenceUnitName + "] ";
}
protected void populate(SessionFactoryBuilder sfBuilder, StandardServiceRegistry ssr) {
final boolean jtaTransactionAccessEnabled = runtimeSettings.getBoolean(
AvailableSettings.ALLOW_JTA_TRANSACTION_ACCESS);
if (!jtaTransactionAccessEnabled) {
((SessionFactoryBuilderImplementor) sfBuilder).disableJtaTransactionAccess();
}
final boolean allowRefreshDetachedEntity = runtimeSettings.getBoolean(
org.hibernate.cfg.AvailableSettings.ALLOW_REFRESH_DETACHED_ENTITY);
if (!allowRefreshDetachedEntity) {
((SessionFactoryBuilderImplementor) sfBuilder).disableRefreshDetachedEntity();
}
final Object sessionFactoryObserverSetting = runtimeSettings.get(AvailableSettings.SESSION_FACTORY_OBSERVER);
if (sessionFactoryObserverSetting != null) {
final StrategySelector strategySelector = ssr.getService(StrategySelector.class);
final SessionFactoryObserver suppliedSessionFactoryObserver = strategySelector
.resolveStrategy(SessionFactoryObserver.class, sessionFactoryObserverSetting);
sfBuilder.addSessionFactoryObservers(suppliedSessionFactoryObserver);
}
sfBuilder.addSessionFactoryObservers(new ServiceRegistryCloser());
sfBuilder.applyEntityNotFoundDelegate(new JpaEntityNotFoundDelegate());
if (this.validatorFactory != null) {
sfBuilder.applyValidatorFactory(validatorFactory);
}
if (this.cdiBeanManager != null) {
sfBuilder.applyBeanManager(cdiBeanManager);
}
}
private static class ServiceRegistryCloser implements SessionFactoryObserver {
@Override
public void sessionFactoryCreated(SessionFactory sessionFactory) {
}
@Override
public void sessionFactoryClosed(SessionFactory sessionFactory) {
SessionFactoryImplementor sfi = ((SessionFactoryImplementor) sessionFactory);
sfi.getServiceRegistry().destroy();
ServiceRegistry basicRegistry = sfi.getServiceRegistry().getParentServiceRegistry();
((ServiceRegistryImplementor) basicRegistry).destroy();
}
}
private static class JpaEntityNotFoundDelegate implements EntityNotFoundDelegate, Serializable {
@Override
public void handleEntityNotFound(String entityName, Serializable id) {
throw new EntityNotFoundException("Unable to find " + entityName + " with id " + id);
}
}
} | class FastBootEntityManagerFactoryBuilder implements EntityManagerFactoryBuilder {
private final MetadataImplementor metadata;
private final String persistenceUnitName;
private final StandardServiceRegistry standardServiceRegistry;
private final RuntimeSettings runtimeSettings;
private final Object validatorFactory;
private final Object cdiBeanManager;
public FastBootEntityManagerFactoryBuilder(MetadataImplementor metadata, String persistenceUnitName,
StandardServiceRegistry standardServiceRegistry, RuntimeSettings runtimeSettings, Object validatorFactory,
Object cdiBeanManager) {
this.metadata = metadata;
this.persistenceUnitName = persistenceUnitName;
this.standardServiceRegistry = standardServiceRegistry;
this.runtimeSettings = runtimeSettings;
this.validatorFactory = validatorFactory;
this.cdiBeanManager = cdiBeanManager;
}
@Override
public EntityManagerFactoryBuilder withValidatorFactory(Object validatorFactory) {
return null;
}
@Override
public EntityManagerFactoryBuilder withDataSource(DataSource dataSource) {
return null;
}
@Override
public EntityManagerFactory build() {
SessionFactoryBuilder sfBuilder = metadata.getSessionFactoryBuilder();
populate(sfBuilder, standardServiceRegistry);
try {
return sfBuilder.build();
} catch (Exception e) {
throw persistenceException("Unable to build Hibernate SessionFactory", e);
}
}
@Override
public void cancel() {
}
@Override
public void generateSchema() {
try {
SessionFactoryBuilder sfBuilder = metadata.getSessionFactoryBuilder();
populate(sfBuilder, standardServiceRegistry);
SchemaManagementToolCoordinator.process(metadata, standardServiceRegistry, runtimeSettings.getSettings(),
DelayedDropRegistryNotAvailableImpl.INSTANCE);
} catch (Exception e) {
throw persistenceException("Error performing schema management", e);
}
cancel();
}
private String getExceptionHeader() {
return "[PersistenceUnit: " + persistenceUnitName + "] ";
}
protected void populate(SessionFactoryBuilder sfBuilder, StandardServiceRegistry ssr) {
final boolean jtaTransactionAccessEnabled = runtimeSettings.getBoolean(
AvailableSettings.ALLOW_JTA_TRANSACTION_ACCESS);
if (!jtaTransactionAccessEnabled) {
((SessionFactoryBuilderImplementor) sfBuilder).disableJtaTransactionAccess();
}
final boolean allowRefreshDetachedEntity = runtimeSettings.getBoolean(
org.hibernate.cfg.AvailableSettings.ALLOW_REFRESH_DETACHED_ENTITY);
if (!allowRefreshDetachedEntity) {
((SessionFactoryBuilderImplementor) sfBuilder).disableRefreshDetachedEntity();
}
final Object sessionFactoryObserverSetting = runtimeSettings.get(AvailableSettings.SESSION_FACTORY_OBSERVER);
if (sessionFactoryObserverSetting != null) {
final StrategySelector strategySelector = ssr.getService(StrategySelector.class);
final SessionFactoryObserver suppliedSessionFactoryObserver = strategySelector
.resolveStrategy(SessionFactoryObserver.class, sessionFactoryObserverSetting);
sfBuilder.addSessionFactoryObservers(suppliedSessionFactoryObserver);
}
sfBuilder.addSessionFactoryObservers(new ServiceRegistryCloser());
sfBuilder.applyEntityNotFoundDelegate(new JpaEntityNotFoundDelegate());
if (this.validatorFactory != null) {
sfBuilder.applyValidatorFactory(validatorFactory);
}
if (this.cdiBeanManager != null) {
sfBuilder.applyBeanManager(cdiBeanManager);
}
}
private static class ServiceRegistryCloser implements SessionFactoryObserver {
@Override
public void sessionFactoryCreated(SessionFactory sessionFactory) {
}
@Override
public void sessionFactoryClosed(SessionFactory sessionFactory) {
SessionFactoryImplementor sfi = ((SessionFactoryImplementor) sessionFactory);
sfi.getServiceRegistry().destroy();
ServiceRegistry basicRegistry = sfi.getServiceRegistry().getParentServiceRegistry();
((ServiceRegistryImplementor) basicRegistry).destroy();
}
}
private static class JpaEntityNotFoundDelegate implements EntityNotFoundDelegate, Serializable {
@Override
public void handleEntityNotFound(String entityName, Serializable id) {
throw new EntityNotFoundException("Unable to find " + entityName + " with id " + id);
}
}
} |
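The discussion on this row is about when the descriptive "Invalid import file" message actually reaches the user: the persistenceException helper only gets to rewrite the message if the schema-management failure is rethrown, for example with quarkus.hibernate-orm.database.generation.halt-on-error=true; otherwise the failure is only logged and startup continues. The cause-chain walk it relies on is sketched below with plain JDK types; the class and the sample exception are illustrative, not Quarkus or Hibernate code.

```java
// Illustrative cause-chain walk, mirroring how persistenceException above decides to
// swap in a more descriptive message when a specific exception type appears in the chain.
public class CauseChainSketch {

    /** Returns true if any throwable in the cause chain is an instance of the given type. */
    static boolean hasCause(Throwable root, Class<? extends Throwable> type) {
        for (Throwable t = root; t != null; t = t.getCause()) {
            if (type.isInstance(t)) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        Exception failure = new RuntimeException("schema export failed",
                new IllegalStateException("could not execute statement from import.sql"));
        String message = hasCause(failure, IllegalStateException.class)
                ? "Invalid import file. Make sure your statements are valid and properly separated by a semi-colon."
                : "Error performing schema management";
        System.out.println(message);
    }
}
```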
Incorrect indentation. I'll merge this PR anyway. Please fix it in the next PR. | private ParserRuleContext getNextRule(ParserRuleContext currentCtx, int nextLookahead) {
switch (currentCtx) {
case COMP_UNIT:
case FUNC_DEFINITION:
case RETURN_TYPE_DESCRIPTOR:
case EXTERNAL_FUNC_BODY:
case FUNC_BODY_BLOCK:
case STATEMENT:
case STATEMENT_WITHOUT_ANNOTS:
case VAR_DECL_STMT:
case ASSIGNMENT_STMT:
case REQUIRED_PARAM:
case DEFAULTABLE_PARAM:
case REST_PARAM:
case MODULE_TYPE_DEFINITION:
case RECORD_FIELD:
case RECORD_TYPE_DESCRIPTOR:
case OBJECT_TYPE_DESCRIPTOR:
case ARG:
case ARG_LIST:
case OBJECT_FUNC_OR_FIELD:
case IF_BLOCK:
case BLOCK_STMT:
case WHILE_BLOCK:
case PANIC_STMT:
case CALL_STMT:
case IMPORT_DECL:
case CONTINUE_STATEMENT:
case BREAK_STATEMENT:
case RETURN_STMT:
case COMPUTED_FIELD_NAME:
case LISTENERS_LIST:
case SERVICE_DECL:
case LISTENER_DECL:
case CONSTANT_DECL:
case NIL_TYPE_DESCRIPTOR:
case COMPOUND_ASSIGNMENT_STMT:
case OPTIONAL_TYPE_DESCRIPTOR:
case ANNOTATIONS:
case VARIABLE_REF:
case TYPE_REFERENCE:
case ANNOT_REFERENCE:
case MAPPING_CONSTRUCTOR:
startContext(currentCtx);
break;
default:
break;
}
ParserRuleContext parentCtx;
STToken nextToken;
switch (currentCtx) {
case EOF:
return ParserRuleContext.EOF;
case COMP_UNIT:
return ParserRuleContext.TOP_LEVEL_NODE;
case PUBLIC_KEYWORD:
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.OBJECT_TYPE_DESCRIPTOR) {
return ParserRuleContext.OBJECT_FUNC_OR_FIELD;
} else if (isParameter(parentCtx)) {
return ParserRuleContext.TYPE_DESCRIPTOR;
}
return ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER;
case PRIVATE_KEYWORD:
return ParserRuleContext.OBJECT_FUNC_OR_FIELD;
case FUNC_DEFINITION:
return ParserRuleContext.FUNCTION_KEYWORD;
case RETURN_TYPE_DESCRIPTOR:
return ParserRuleContext.RETURNS_KEYWORD;
case EXTERNAL_FUNC_BODY:
return ParserRuleContext.ASSIGN_OP;
case FUNC_BODY_BLOCK:
return ParserRuleContext.OPEN_BRACE;
case STATEMENT:
case STATEMENT_WITHOUT_ANNOTS:
endContext();
return ParserRuleContext.CLOSE_BRACE;
case ASSIGN_OP:
return getNextRuleForEqualOp();
case COMPOUND_BINARY_OPERATOR:
return ParserRuleContext.ASSIGN_OP;
case CLOSE_BRACE:
return getNextRuleForCloseBrace(nextLookahead);
case CLOSE_PARENTHESIS:
parentCtx = getParentContext();
if (isParameter(parentCtx)) {
endContext();
endContext();
}
if (parentCtx == ParserRuleContext.NIL_TYPE_DESCRIPTOR) {
endContext();
return getNextRuleForTypeDescriptor();
}
return ParserRuleContext.FUNC_BODY;
case EXPRESSION:
case BASIC_LITERAL:
return ParserRuleContext.EXPRESSION_RHS;
case EXTERNAL_KEYWORD:
return ParserRuleContext.SEMICOLON;
case FUNCTION_KEYWORD:
return ParserRuleContext.FUNC_NAME;
case FUNC_NAME:
return ParserRuleContext.OPEN_PARENTHESIS;
case OPEN_BRACE:
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.LISTENERS_LIST) {
endContext();
}
if (isEndOfBlock(this.tokenReader.peek(nextLookahead))) {
return ParserRuleContext.CLOSE_BRACE;
}
if (parentCtx == ParserRuleContext.MAPPING_CONSTRUCTOR) {
return ParserRuleContext.MAPPING_FIELD;
}
return ParserRuleContext.STATEMENT;
case OPEN_PARENTHESIS:
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.FUNC_DEFINITION) {
return ParserRuleContext.PARAM_LIST;
}
if (parentCtx == ParserRuleContext.NIL_TYPE_DESCRIPTOR) {
return ParserRuleContext.CLOSE_PARENTHESIS;
}
return ParserRuleContext.ARG;
case RETURNS_KEYWORD:
if (this.tokenReader.peek(nextLookahead).kind != SyntaxKind.RETURNS_KEYWORD) {
return ParserRuleContext.FUNC_BODY;
}
return ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR;
case SEMICOLON:
return getNextRuleForSemicolon(nextLookahead);
case SIMPLE_TYPE_DESCRIPTOR:
return getNextRuleForTypeDescriptor();
case VARIABLE_NAME:
case PARAMETER_RHS:
return getNextRuleForVarName(nextLookahead);
case TOP_LEVEL_NODE_WITHOUT_MODIFIER:
return ParserRuleContext.FUNC_DEFINITION;
case FUNC_BODY:
return ParserRuleContext.TOP_LEVEL_NODE;
case REQUIRED_PARAM:
case DEFAULTABLE_PARAM:
case REST_PARAM:
nextToken = this.tokenReader.peek(nextLookahead);
if (isEndOfParametersList(nextToken)) {
endContext();
return ParserRuleContext.CLOSE_PARENTHESIS;
}
return ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR;
case ASSIGNMENT_STMT:
return ParserRuleContext.VARIABLE_NAME;
case COMPOUND_ASSIGNMENT_STMT:
return ParserRuleContext.VARIABLE_NAME;
case VAR_DECL_STMT:
return ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR;
case EXPRESSION_RHS:
return ParserRuleContext.BINARY_OPERATOR;
case BINARY_OPERATOR:
return ParserRuleContext.EXPRESSION;
case COMMA:
return getNextRuleForComma();
case AFTER_PARAMETER_TYPE:
return getNextRuleForParamType();
case MODULE_TYPE_DEFINITION:
return ParserRuleContext.TYPE_KEYWORD;
case CLOSED_RECORD_BODY_END:
nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
return ParserRuleContext.EOF;
}
return ParserRuleContext.TOP_LEVEL_NODE;
case CLOSED_RECORD_BODY_START:
startContext(ParserRuleContext.RECORD_FIELD);
return ParserRuleContext.RECORD_FIELD;
case ELLIPSIS:
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.MAPPING_CONSTRUCTOR || parentCtx == ParserRuleContext.ARG) {
return ParserRuleContext.EXPRESSION;
}
return ParserRuleContext.VARIABLE_NAME;
case QUESTION_MARK:
return getNextRuleForQuestionMark();
case RECORD_KEYWORD:
return ParserRuleContext.RECORD_BODY_START;
case TYPE_KEYWORD:
return ParserRuleContext.TYPE_NAME;
case RECORD_TYPE_DESCRIPTOR:
return ParserRuleContext.RECORD_KEYWORD;
case ASTERISK:
return ParserRuleContext.TYPE_REFERENCE;
case TYPE_NAME:
return ParserRuleContext.TYPE_DESCRIPTOR;
case OBJECT_KEYWORD:
return ParserRuleContext.OPEN_BRACE;
case REMOTE_KEYWORD:
return ParserRuleContext.FUNCTION_KEYWORD;
case OBJECT_TYPE_DESCRIPTOR:
return ParserRuleContext.OBJECT_TYPE_DESCRIPTOR_START;
case OBJECT_TYPE_FIRST_QUALIFIER:
case OBJECT_TYPE_SECOND_QUALIFIER:
return ParserRuleContext.OBJECT_KEYWORD;
case ABSTRACT_KEYWORD:
case CLIENT_KEYWORD:
return ParserRuleContext.OBJECT_KEYWORD;
case OPEN_BRACKET:
return ParserRuleContext.EXPRESSION;
case CLOSE_BRACKET:
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.COMPUTED_FIELD_NAME) {
endContext();
}
return ParserRuleContext.EXPRESSION_RHS;
case FIELD_OR_FUNC_NAME:
return ParserRuleContext.EXPRESSION_RHS;
case DOT:
return getNextRuleForDot();
case IF_KEYWORD:
return ParserRuleContext.EXPRESSION;
case ELSE_KEYWORD:
return ParserRuleContext.ELSE_BODY;
case BLOCK_STMT:
return ParserRuleContext.OPEN_BRACE;
case IF_BLOCK:
return ParserRuleContext.IF_KEYWORD;
case WHILE_BLOCK:
return ParserRuleContext.WHILE_KEYWORD;
case WHILE_KEYWORD:
return ParserRuleContext.EXPRESSION;
case CHECKING_KEYWORD:
return ParserRuleContext.EXPRESSION;
case CALL_STMT:
return ParserRuleContext.CALL_STMT_START;
case PANIC_STMT:
return ParserRuleContext.PANIC_KEYWORD;
case PANIC_KEYWORD:
return ParserRuleContext.EXPRESSION;
case FUNC_CALL:
return ParserRuleContext.IMPORT_PREFIX;
case IMPORT_KEYWORD:
return ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME;
case IMPORT_PREFIX:
return ParserRuleContext.SEMICOLON;
case VERSION_NUMBER:
case VERSION_KEYWORD:
return ParserRuleContext.MAJOR_VERSION;
case SLASH:
return ParserRuleContext.IMPORT_MODULE_NAME;
case IMPORT_ORG_OR_MODULE_NAME:
return ParserRuleContext.IMPORT_DECL_RHS;
case IMPORT_MODULE_NAME:
return ParserRuleContext.AFTER_IMPORT_MODULE_NAME;
case AS_KEYWORD:
return ParserRuleContext.IMPORT_PREFIX;
case MAJOR_VERSION:
case MINOR_VERSION:
case IMPORT_SUB_VERSION:
return ParserRuleContext.MAJOR_MINOR_VERSION_END;
case PATCH_VERSION:
return ParserRuleContext.IMPORT_PREFIX_DECL;
case IMPORT_DECL:
return ParserRuleContext.IMPORT_KEYWORD;
case CONTINUE_STATEMENT:
return ParserRuleContext.CONTINUE_KEYWORD;
case BREAK_STATEMENT:
return ParserRuleContext.BREAK_KEYWORD;
case CONTINUE_KEYWORD:
case BREAK_KEYWORD:
return ParserRuleContext.SEMICOLON;
case RETURN_STMT:
return ParserRuleContext.RETURN_KEYWORD;
case RETURN_KEYWORD:
return ParserRuleContext.RETURN_STMT_RHS;
case ACCESS_EXPRESSION:
return ParserRuleContext.VARIABLE_REF;
case MAPPING_FIELD_NAME:
return ParserRuleContext.SPECIFIC_FIELD_RHS;
case COLON:
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.MAPPING_CONSTRUCTOR) {
return ParserRuleContext.EXPRESSION;
}
return ParserRuleContext.IDENTIFIER;
case STRING_LITERAL:
return ParserRuleContext.COLON;
case COMPUTED_FIELD_NAME:
return ParserRuleContext.OPEN_BRACKET;
case LISTENERS_LIST:
return ParserRuleContext.EXPRESSION;
case ON_KEYWORD:
return ParserRuleContext.LISTENERS_LIST;
case RESOURCE_KEYWORD:
return ParserRuleContext.FUNC_DEFINITION;
case SERVICE_DECL:
return ParserRuleContext.SERVICE_KEYWORD;
case SERVICE_KEYWORD:
return ParserRuleContext.OPTIONAL_SERVICE_NAME;
case SERVICE_NAME:
return ParserRuleContext.ON_KEYWORD;
case LISTENER_KEYWORD:
return ParserRuleContext.TYPE_DESCRIPTOR;
case LISTENER_DECL:
return ParserRuleContext.LISTENER_KEYWORD;
case FINAL_KEYWORD:
return ParserRuleContext.TYPE_DESCRIPTOR;
case CONSTANT_DECL:
return ParserRuleContext.CONST_KEYWORD;
case CONST_KEYWORD:
return ParserRuleContext.CONST_DECL_TYPE;
case CONST_DECL_TYPE:
return ParserRuleContext.CONST_DECL_RHS;
case NIL_TYPE_DESCRIPTOR:
return ParserRuleContext.OPEN_PARENTHESIS;
case TYPEOF_EXPRESSION:
return ParserRuleContext.TYPEOF_KEYWORD;
case TYPEOF_KEYWORD:
return ParserRuleContext.EXPRESSION;
case OPTIONAL_TYPE_DESCRIPTOR:
return ParserRuleContext.TYPE_DESCRIPTOR;
case UNARY_EXPRESSION:
return ParserRuleContext.UNARY_OPERATOR;
case UNARY_OPERATOR:
return ParserRuleContext.EXPRESSION;
case AT:
return ParserRuleContext.ANNOT_REFERENCE;
case DOC_STRING:
return ParserRuleContext.ANNOTATIONS;
case ANNOTATIONS:
return ParserRuleContext.AT;
case MAPPING_CONSTRUCTOR:
return ParserRuleContext.OPEN_BRACE;
case VARIABLE_REF:
case TYPE_REFERENCE:
case ANNOT_REFERENCE:
return ParserRuleContext.QUALIFIED_IDENTIFIER;
case QUALIFIED_IDENTIFIER:
nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.COLON_TOKEN) {
return ParserRuleContext.COLON;
}
case IDENTIFIER:
parentCtx = getParentContext();
endContext();
switch (parentCtx) {
case VARIABLE_REF:
return ParserRuleContext.EXPRESSION_RHS;
case TYPE_REFERENCE:
return ParserRuleContext.SEMICOLON;
case ANNOT_REFERENCE:
return ParserRuleContext.MAPPING_CONSTRUCTOR;
default:
throw new IllegalStateException();
}
case IS_KEYWORD:
return ParserRuleContext.TYPE_DESCRIPTOR;
case IS_EXPRESSION:
return ParserRuleContext.EXPRESSION_RHS;
case DECIMAL_INTEGER_LITERAL:
case OBJECT_FUNC_OR_FIELD:
case OBJECT_METHOD_START:
case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:
case OBJECT_FIELD_RHS:
case PARAM_LIST:
case ARG:
case ARG_LIST:
case ASSIGNMENT_OR_VAR_DECL_STMT:
case ASSIGNMENT_OR_VAR_DECL_STMT_RHS:
case BOOLEAN_LITERAL:
case CALL_STMT_START:
case ELSE_BLOCK:
case ELSE_BODY:
case FIELD_DESCRIPTOR_RHS:
case FIELD_OR_REST_DESCIPTOR_RHS:
case IMPORT_PREFIX_DECL:
case NAMED_OR_POSITIONAL_ARG_RHS:
case OBJECT_MEMBER:
case OBJECT_TYPE_DESCRIPTOR_START:
case RECORD_BODY_END:
case RECORD_BODY_START:
case RECORD_FIELD:
case STATEMENT_START_IDENTIFIER:
case TOP_LEVEL_NODE_WITHOUT_METADATA:
case TYPE_DESCRIPTOR:
case VAR_DECL_STMT_RHS:
case AFTER_IMPORT_MODULE_NAME:
case IMPORT_DECL_RHS:
case IMPORT_VERSION_DECL:
case MAJOR_MINOR_VERSION_END:
case MAPPING_FIELD:
case SPECIFIC_FIELD_RHS:
case RETURN_STMT_RHS:
case OPTIONAL_SERVICE_NAME:
case RESOURCE_DEF:
case CONST_DECL_RHS:
case OBJECT_MEMBER_WITHOUT_METADATA:
case TOP_LEVEL_NODE:
default:
throw new IllegalStateException("cannot find the next rule for: " + currentCtx);
}
} | } | private ParserRuleContext getNextRule(ParserRuleContext currentCtx, int nextLookahead) {
switch (currentCtx) {
case COMP_UNIT:
case FUNC_DEFINITION:
case RETURN_TYPE_DESCRIPTOR:
case EXTERNAL_FUNC_BODY:
case FUNC_BODY_BLOCK:
case STATEMENT:
case STATEMENT_WITHOUT_ANNOTS:
case VAR_DECL_STMT:
case ASSIGNMENT_STMT:
case REQUIRED_PARAM:
case DEFAULTABLE_PARAM:
case REST_PARAM:
case MODULE_TYPE_DEFINITION:
case RECORD_FIELD:
case RECORD_TYPE_DESCRIPTOR:
case OBJECT_TYPE_DESCRIPTOR:
case ARG:
case ARG_LIST:
case OBJECT_FUNC_OR_FIELD:
case IF_BLOCK:
case BLOCK_STMT:
case WHILE_BLOCK:
case PANIC_STMT:
case CALL_STMT:
case IMPORT_DECL:
case CONTINUE_STATEMENT:
case BREAK_STATEMENT:
case RETURN_STMT:
case COMPUTED_FIELD_NAME:
case LISTENERS_LIST:
case SERVICE_DECL:
case LISTENER_DECL:
case CONSTANT_DECL:
case NIL_TYPE_DESCRIPTOR:
case COMPOUND_ASSIGNMENT_STMT:
case OPTIONAL_TYPE_DESCRIPTOR:
case ANNOTATIONS:
case VARIABLE_REF:
case TYPE_REFERENCE:
case ANNOT_REFERENCE:
case MAPPING_CONSTRUCTOR:
startContext(currentCtx);
break;
default:
break;
}
ParserRuleContext parentCtx;
STToken nextToken;
switch (currentCtx) {
case EOF:
return ParserRuleContext.EOF;
case COMP_UNIT:
return ParserRuleContext.TOP_LEVEL_NODE;
case PUBLIC_KEYWORD:
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.OBJECT_TYPE_DESCRIPTOR) {
return ParserRuleContext.OBJECT_FUNC_OR_FIELD;
} else if (isParameter(parentCtx)) {
return ParserRuleContext.TYPE_DESCRIPTOR;
}
return ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_MODIFIER;
case PRIVATE_KEYWORD:
return ParserRuleContext.OBJECT_FUNC_OR_FIELD;
case FUNC_DEFINITION:
return ParserRuleContext.FUNCTION_KEYWORD;
case RETURN_TYPE_DESCRIPTOR:
return ParserRuleContext.RETURNS_KEYWORD;
case EXTERNAL_FUNC_BODY:
return ParserRuleContext.ASSIGN_OP;
case FUNC_BODY_BLOCK:
return ParserRuleContext.OPEN_BRACE;
case STATEMENT:
case STATEMENT_WITHOUT_ANNOTS:
endContext();
return ParserRuleContext.CLOSE_BRACE;
case ASSIGN_OP:
return getNextRuleForEqualOp();
case COMPOUND_BINARY_OPERATOR:
return ParserRuleContext.ASSIGN_OP;
case CLOSE_BRACE:
return getNextRuleForCloseBrace(nextLookahead);
case CLOSE_PARENTHESIS:
parentCtx = getParentContext();
if (isParameter(parentCtx)) {
endContext();
endContext();
}
if (parentCtx == ParserRuleContext.NIL_TYPE_DESCRIPTOR) {
endContext();
return getNextRuleForTypeDescriptor();
}
return ParserRuleContext.FUNC_BODY;
case EXPRESSION:
case BASIC_LITERAL:
return ParserRuleContext.EXPRESSION_RHS;
case EXTERNAL_KEYWORD:
return ParserRuleContext.SEMICOLON;
case FUNCTION_KEYWORD:
return ParserRuleContext.FUNC_NAME;
case FUNC_NAME:
return ParserRuleContext.OPEN_PARENTHESIS;
case OPEN_BRACE:
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.LISTENERS_LIST) {
endContext();
}
if (isEndOfBlock(this.tokenReader.peek(nextLookahead))) {
return ParserRuleContext.CLOSE_BRACE;
}
if (parentCtx == ParserRuleContext.MAPPING_CONSTRUCTOR) {
return ParserRuleContext.MAPPING_FIELD;
}
return ParserRuleContext.STATEMENT;
case OPEN_PARENTHESIS:
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.FUNC_DEFINITION) {
return ParserRuleContext.PARAM_LIST;
}
if (parentCtx == ParserRuleContext.NIL_TYPE_DESCRIPTOR) {
return ParserRuleContext.CLOSE_PARENTHESIS;
}
return ParserRuleContext.ARG;
case RETURNS_KEYWORD:
if (this.tokenReader.peek(nextLookahead).kind != SyntaxKind.RETURNS_KEYWORD) {
return ParserRuleContext.FUNC_BODY;
}
return ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR;
case SEMICOLON:
return getNextRuleForSemicolon(nextLookahead);
case SIMPLE_TYPE_DESCRIPTOR:
return getNextRuleForTypeDescriptor();
case VARIABLE_NAME:
case PARAMETER_RHS:
return getNextRuleForVarName(nextLookahead);
case TOP_LEVEL_NODE_WITHOUT_MODIFIER:
return ParserRuleContext.FUNC_DEFINITION;
case FUNC_BODY:
return ParserRuleContext.TOP_LEVEL_NODE;
case REQUIRED_PARAM:
case DEFAULTABLE_PARAM:
case REST_PARAM:
nextToken = this.tokenReader.peek(nextLookahead);
if (isEndOfParametersList(nextToken)) {
endContext();
return ParserRuleContext.CLOSE_PARENTHESIS;
}
return ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR;
case ASSIGNMENT_STMT:
return ParserRuleContext.VARIABLE_NAME;
case COMPOUND_ASSIGNMENT_STMT:
return ParserRuleContext.VARIABLE_NAME;
case VAR_DECL_STMT:
return ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR;
case EXPRESSION_RHS:
return ParserRuleContext.BINARY_OPERATOR;
case BINARY_OPERATOR:
return ParserRuleContext.EXPRESSION;
case COMMA:
return getNextRuleForComma();
case AFTER_PARAMETER_TYPE:
return getNextRuleForParamType();
case MODULE_TYPE_DEFINITION:
return ParserRuleContext.TYPE_KEYWORD;
case CLOSED_RECORD_BODY_END:
nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
return ParserRuleContext.EOF;
}
return ParserRuleContext.TOP_LEVEL_NODE;
case CLOSED_RECORD_BODY_START:
startContext(ParserRuleContext.RECORD_FIELD);
return ParserRuleContext.RECORD_FIELD;
case ELLIPSIS:
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.MAPPING_CONSTRUCTOR || parentCtx == ParserRuleContext.ARG) {
return ParserRuleContext.EXPRESSION;
}
return ParserRuleContext.VARIABLE_NAME;
case QUESTION_MARK:
return getNextRuleForQuestionMark();
case RECORD_KEYWORD:
return ParserRuleContext.RECORD_BODY_START;
case TYPE_KEYWORD:
return ParserRuleContext.TYPE_NAME;
case RECORD_TYPE_DESCRIPTOR:
return ParserRuleContext.RECORD_KEYWORD;
case ASTERISK:
return ParserRuleContext.TYPE_REFERENCE;
case TYPE_NAME:
return ParserRuleContext.TYPE_DESCRIPTOR;
case OBJECT_KEYWORD:
return ParserRuleContext.OPEN_BRACE;
case REMOTE_KEYWORD:
return ParserRuleContext.FUNCTION_KEYWORD;
case OBJECT_TYPE_DESCRIPTOR:
return ParserRuleContext.OBJECT_TYPE_DESCRIPTOR_START;
case OBJECT_TYPE_FIRST_QUALIFIER:
case OBJECT_TYPE_SECOND_QUALIFIER:
return ParserRuleContext.OBJECT_KEYWORD;
case ABSTRACT_KEYWORD:
case CLIENT_KEYWORD:
return ParserRuleContext.OBJECT_KEYWORD;
case OPEN_BRACKET:
return ParserRuleContext.EXPRESSION;
case CLOSE_BRACKET:
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.COMPUTED_FIELD_NAME) {
endContext();
}
return ParserRuleContext.EXPRESSION_RHS;
case FIELD_OR_FUNC_NAME:
return ParserRuleContext.EXPRESSION_RHS;
case DOT:
return getNextRuleForDot();
case IF_KEYWORD:
return ParserRuleContext.EXPRESSION;
case ELSE_KEYWORD:
return ParserRuleContext.ELSE_BODY;
case BLOCK_STMT:
return ParserRuleContext.OPEN_BRACE;
case IF_BLOCK:
return ParserRuleContext.IF_KEYWORD;
case WHILE_BLOCK:
return ParserRuleContext.WHILE_KEYWORD;
case WHILE_KEYWORD:
return ParserRuleContext.EXPRESSION;
case CHECKING_KEYWORD:
return ParserRuleContext.EXPRESSION;
case CALL_STMT:
return ParserRuleContext.CALL_STMT_START;
case PANIC_STMT:
return ParserRuleContext.PANIC_KEYWORD;
case PANIC_KEYWORD:
return ParserRuleContext.EXPRESSION;
case FUNC_CALL:
return ParserRuleContext.IMPORT_PREFIX;
case IMPORT_KEYWORD:
return ParserRuleContext.IMPORT_ORG_OR_MODULE_NAME;
case IMPORT_PREFIX:
return ParserRuleContext.SEMICOLON;
case VERSION_NUMBER:
case VERSION_KEYWORD:
return ParserRuleContext.MAJOR_VERSION;
case SLASH:
return ParserRuleContext.IMPORT_MODULE_NAME;
case IMPORT_ORG_OR_MODULE_NAME:
return ParserRuleContext.IMPORT_DECL_RHS;
case IMPORT_MODULE_NAME:
return ParserRuleContext.AFTER_IMPORT_MODULE_NAME;
case AS_KEYWORD:
return ParserRuleContext.IMPORT_PREFIX;
case MAJOR_VERSION:
case MINOR_VERSION:
case IMPORT_SUB_VERSION:
return ParserRuleContext.MAJOR_MINOR_VERSION_END;
case PATCH_VERSION:
return ParserRuleContext.IMPORT_PREFIX_DECL;
case IMPORT_DECL:
return ParserRuleContext.IMPORT_KEYWORD;
case CONTINUE_STATEMENT:
return ParserRuleContext.CONTINUE_KEYWORD;
case BREAK_STATEMENT:
return ParserRuleContext.BREAK_KEYWORD;
case CONTINUE_KEYWORD:
case BREAK_KEYWORD:
return ParserRuleContext.SEMICOLON;
case RETURN_STMT:
return ParserRuleContext.RETURN_KEYWORD;
case RETURN_KEYWORD:
return ParserRuleContext.RETURN_STMT_RHS;
case ACCESS_EXPRESSION:
return ParserRuleContext.VARIABLE_REF;
case MAPPING_FIELD_NAME:
return ParserRuleContext.SPECIFIC_FIELD_RHS;
case COLON:
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.MAPPING_CONSTRUCTOR) {
return ParserRuleContext.EXPRESSION;
}
return ParserRuleContext.IDENTIFIER;
case STRING_LITERAL:
return ParserRuleContext.COLON;
case COMPUTED_FIELD_NAME:
return ParserRuleContext.OPEN_BRACKET;
case LISTENERS_LIST:
return ParserRuleContext.EXPRESSION;
case ON_KEYWORD:
return ParserRuleContext.LISTENERS_LIST;
case RESOURCE_KEYWORD:
return ParserRuleContext.FUNC_DEFINITION;
case SERVICE_DECL:
return ParserRuleContext.SERVICE_KEYWORD;
case SERVICE_KEYWORD:
return ParserRuleContext.OPTIONAL_SERVICE_NAME;
case SERVICE_NAME:
return ParserRuleContext.ON_KEYWORD;
case LISTENER_KEYWORD:
return ParserRuleContext.TYPE_DESCRIPTOR;
case LISTENER_DECL:
return ParserRuleContext.LISTENER_KEYWORD;
case FINAL_KEYWORD:
return ParserRuleContext.TYPE_DESCRIPTOR;
case CONSTANT_DECL:
return ParserRuleContext.CONST_KEYWORD;
case CONST_KEYWORD:
return ParserRuleContext.CONST_DECL_TYPE;
case CONST_DECL_TYPE:
return ParserRuleContext.CONST_DECL_RHS;
case NIL_TYPE_DESCRIPTOR:
return ParserRuleContext.OPEN_PARENTHESIS;
case TYPEOF_EXPRESSION:
return ParserRuleContext.TYPEOF_KEYWORD;
case TYPEOF_KEYWORD:
return ParserRuleContext.EXPRESSION;
case OPTIONAL_TYPE_DESCRIPTOR:
return ParserRuleContext.TYPE_DESCRIPTOR;
case UNARY_EXPRESSION:
return ParserRuleContext.UNARY_OPERATOR;
case UNARY_OPERATOR:
return ParserRuleContext.EXPRESSION;
case AT:
return ParserRuleContext.ANNOT_REFERENCE;
case DOC_STRING:
return ParserRuleContext.ANNOTATIONS;
case ANNOTATIONS:
return ParserRuleContext.AT;
case MAPPING_CONSTRUCTOR:
return ParserRuleContext.OPEN_BRACE;
case VARIABLE_REF:
case TYPE_REFERENCE:
case ANNOT_REFERENCE:
return ParserRuleContext.QUALIFIED_IDENTIFIER;
case QUALIFIED_IDENTIFIER:
nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.COLON_TOKEN) {
return ParserRuleContext.COLON;
}
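                // No colon after the qualifier: fall through and handle it as a plain identifier.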
case IDENTIFIER:
parentCtx = getParentContext();
endContext();
switch (parentCtx) {
case VARIABLE_REF:
return ParserRuleContext.EXPRESSION_RHS;
case TYPE_REFERENCE:
return ParserRuleContext.SEMICOLON;
case ANNOT_REFERENCE:
return ParserRuleContext.MAPPING_CONSTRUCTOR;
default:
throw new IllegalStateException();
}
case IS_KEYWORD:
return ParserRuleContext.TYPE_DESCRIPTOR;
case IS_EXPRESSION:
return ParserRuleContext.EXPRESSION_RHS;
case DECIMAL_INTEGER_LITERAL:
case OBJECT_FUNC_OR_FIELD:
case OBJECT_METHOD_START:
case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:
case OBJECT_FIELD_RHS:
case PARAM_LIST:
case ARG:
case ARG_LIST:
case ASSIGNMENT_OR_VAR_DECL_STMT:
case ASSIGNMENT_OR_VAR_DECL_STMT_RHS:
case BOOLEAN_LITERAL:
case CALL_STMT_START:
case ELSE_BLOCK:
case ELSE_BODY:
case FIELD_DESCRIPTOR_RHS:
case FIELD_OR_REST_DESCIPTOR_RHS:
case IMPORT_PREFIX_DECL:
case NAMED_OR_POSITIONAL_ARG_RHS:
case OBJECT_MEMBER:
case OBJECT_TYPE_DESCRIPTOR_START:
case RECORD_BODY_END:
case RECORD_BODY_START:
case RECORD_FIELD:
case STATEMENT_START_IDENTIFIER:
case TOP_LEVEL_NODE_WITHOUT_METADATA:
case TYPE_DESCRIPTOR:
case VAR_DECL_STMT_RHS:
case AFTER_IMPORT_MODULE_NAME:
case IMPORT_DECL_RHS:
case IMPORT_VERSION_DECL:
case MAJOR_MINOR_VERSION_END:
case MAPPING_FIELD:
case SPECIFIC_FIELD_RHS:
case RETURN_STMT_RHS:
case OPTIONAL_SERVICE_NAME:
case RESOURCE_DEF:
case CONST_DECL_RHS:
case OBJECT_MEMBER_WITHOUT_METADATA:
case TOP_LEVEL_NODE:
default:
throw new IllegalStateException("cannot find the next rule for: " + currentCtx);
}
} | class BallerinaParserErrorHandler {
private final AbstractTokenReader tokenReader;
private final BallerinaParserErrorListener errorListener;
private final BallerinaParser parser;
private ArrayDeque<ParserRuleContext> ctxStack = new ArrayDeque<>();
/**
     * Two or more rules whose left-hand side of the production is the same (i.e., the rule has alternative paths).
     * e.g.: FUNC_BODIES --> FUNC_BODY_BLOCK
* FUNC_BODIES --> EXTERNAL_FUNC_BODY
*/
private static final ParserRuleContext[] FUNC_BODIES =
{ ParserRuleContext.FUNC_BODY_BLOCK, ParserRuleContext.EXTERNAL_FUNC_BODY };
private static final ParserRuleContext[] STATEMENTS = { ParserRuleContext.CLOSE_BRACE,
ParserRuleContext.ASSIGNMENT_STMT, ParserRuleContext.VAR_DECL_STMT, ParserRuleContext.IF_BLOCK,
ParserRuleContext.WHILE_BLOCK, ParserRuleContext.CALL_STMT, ParserRuleContext.PANIC_STMT,
ParserRuleContext.CONTINUE_STATEMENT, ParserRuleContext.BREAK_STATEMENT, ParserRuleContext.RETURN_STMT,
ParserRuleContext.COMPOUND_ASSIGNMENT_STMT };
private static final ParserRuleContext[] VAR_DECL_RHS =
{ ParserRuleContext.SEMICOLON, ParserRuleContext.ASSIGN_OP };
private static final ParserRuleContext[] PARAMETER_RHS = { ParserRuleContext.COMMA, ParserRuleContext.ASSIGN_OP };
private static final ParserRuleContext[] TOP_LEVEL_NODE =
{ ParserRuleContext.DOC_STRING, ParserRuleContext.ANNOTATIONS, ParserRuleContext.PUBLIC_KEYWORD,
ParserRuleContext.FUNC_DEFINITION, ParserRuleContext.MODULE_TYPE_DEFINITION,
ParserRuleContext.IMPORT_DECL, ParserRuleContext.SERVICE_DECL, ParserRuleContext.LISTENER_DECL,
ParserRuleContext.CONSTANT_DECL, ParserRuleContext.VAR_DECL_STMT, ParserRuleContext.EOF };
private static final ParserRuleContext[] TOP_LEVEL_NODE_WITHOUT_METADATA =
new ParserRuleContext[] { ParserRuleContext.PUBLIC_KEYWORD, ParserRuleContext.FUNC_DEFINITION,
ParserRuleContext.MODULE_TYPE_DEFINITION, ParserRuleContext.IMPORT_DECL,
ParserRuleContext.SERVICE_DECL, ParserRuleContext.LISTENER_DECL, ParserRuleContext.CONSTANT_DECL,
ParserRuleContext.VAR_DECL_STMT, ParserRuleContext.EOF };
private static final ParserRuleContext[] TOP_LEVEL_NODE_WITHOUT_MODIFIER =
{ ParserRuleContext.FUNC_DEFINITION, ParserRuleContext.MODULE_TYPE_DEFINITION,
ParserRuleContext.IMPORT_DECL, ParserRuleContext.SERVICE_DECL, ParserRuleContext.LISTENER_DECL,
ParserRuleContext.CONSTANT_DECL, ParserRuleContext.VAR_DECL_STMT, ParserRuleContext.EOF };
private static final ParserRuleContext[] TYPE_OR_VAR_NAME =
{ ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR, ParserRuleContext.VARIABLE_NAME };
private static final ParserRuleContext[] ASSIGNMENT_OR_VAR_DECL_SECOND_TOKEN =
{ ParserRuleContext.ASSIGN_OP, ParserRuleContext.VARIABLE_NAME };
private static final ParserRuleContext[] FIELD_DESCRIPTOR_RHS =
{ ParserRuleContext.SEMICOLON, ParserRuleContext.QUESTION_MARK, ParserRuleContext.ASSIGN_OP };
private static final ParserRuleContext[] FIELD_OR_REST_DESCIPTOR_RHS =
{ ParserRuleContext.ELLIPSIS, ParserRuleContext.VARIABLE_NAME };
private static final ParserRuleContext[] RECORD_BODY_START =
{ ParserRuleContext.CLOSED_RECORD_BODY_START, ParserRuleContext.OPEN_BRACE };
private static final ParserRuleContext[] RECORD_BODY_END =
{ ParserRuleContext.CLOSED_RECORD_BODY_END, ParserRuleContext.CLOSE_BRACE };
private static final ParserRuleContext[] TYPE_DESCRIPTORS =
{ ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR, ParserRuleContext.RECORD_TYPE_DESCRIPTOR,
ParserRuleContext.OBJECT_TYPE_DESCRIPTOR, ParserRuleContext.NIL_TYPE_DESCRIPTOR };
private static final ParserRuleContext[] RECORD_FIELD =
{ ParserRuleContext.ANNOTATIONS, ParserRuleContext.ASTERISK, ParserRuleContext.TYPE_DESCRIPTOR };
private static final ParserRuleContext[] RECORD_FIELD_WITHOUT_METADATA =
{ ParserRuleContext.ASTERISK, ParserRuleContext.TYPE_DESCRIPTOR };
private static final ParserRuleContext[] ARG_START =
{ ParserRuleContext.VARIABLE_NAME, ParserRuleContext.ELLIPSIS, ParserRuleContext.EXPRESSION };
private static final ParserRuleContext[] NAMED_OR_POSITIONAL_ARG_RHS =
{ ParserRuleContext.COMMA, ParserRuleContext.ASSIGN_OP };
private static final ParserRuleContext[] PARAM_LIST =
{ ParserRuleContext.CLOSE_PARENTHESIS, ParserRuleContext.REQUIRED_PARAM };
private static final ParserRuleContext[] OBJECT_FIELD_RHS =
{ ParserRuleContext.SEMICOLON, ParserRuleContext.ASSIGN_OP };
private static final ParserRuleContext[] OBJECT_MEMBER_START =
{ ParserRuleContext.DOC_STRING, ParserRuleContext.ANNOTATIONS, ParserRuleContext.ASTERISK,
ParserRuleContext.OBJECT_FUNC_OR_FIELD, ParserRuleContext.CLOSE_BRACE };
private static final ParserRuleContext[] OBJECT_MEMBER_WITHOUT_METADATA =
{ ParserRuleContext.ASTERISK, ParserRuleContext.OBJECT_FUNC_OR_FIELD, ParserRuleContext.CLOSE_BRACE };
private static final ParserRuleContext[] OBJECT_FUNC_OR_FIELD = { ParserRuleContext.PUBLIC_KEYWORD,
ParserRuleContext.PRIVATE_KEYWORD, ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY };
private static final ParserRuleContext[] OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY =
{ ParserRuleContext.TYPE_DESCRIPTOR, ParserRuleContext.OBJECT_METHOD_START };
private static final ParserRuleContext[] OBJECT_METHOD_START =
{ ParserRuleContext.REMOTE_KEYWORD, ParserRuleContext.FUNCTION_KEYWORD };
private static final ParserRuleContext[] OBJECT_TYPE_DESCRIPTOR_START =
{ ParserRuleContext.OBJECT_TYPE_FIRST_QUALIFIER, ParserRuleContext.OBJECT_KEYWORD };
private static final ParserRuleContext[] ELSE_BODY = { ParserRuleContext.IF_BLOCK, ParserRuleContext.OPEN_BRACE };
private static final ParserRuleContext[] ELSE_BLOCK =
{ ParserRuleContext.ELSE_KEYWORD, ParserRuleContext.STATEMENT };
private static final ParserRuleContext[] CALL_STATEMENT =
{ ParserRuleContext.CHECKING_KEYWORD, ParserRuleContext.VARIABLE_NAME };
private static final ParserRuleContext[] IMPORT_PREFIX_DECL =
{ ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };
private static final ParserRuleContext[] IMPORT_VERSION =
{ ParserRuleContext.VERSION_KEYWORD, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };
private static final ParserRuleContext[] IMPORT_DECL_RHS = { ParserRuleContext.SLASH, ParserRuleContext.DOT,
ParserRuleContext.VERSION_KEYWORD, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };
private static final ParserRuleContext[] AFTER_IMPORT_MODULE_NAME = { ParserRuleContext.DOT,
ParserRuleContext.VERSION_KEYWORD, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };
private static final ParserRuleContext[] MAJOR_MINOR_VERSION_END =
{ ParserRuleContext.DOT, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };
private static final ParserRuleContext[] RETURN_RHS = { ParserRuleContext.SEMICOLON, ParserRuleContext.EXPRESSION };
private static final ParserRuleContext[] EXPRESSIONS = { ParserRuleContext.BASIC_LITERAL,
ParserRuleContext.VARIABLE_REF, ParserRuleContext.ACCESS_EXPRESSION, ParserRuleContext.TYPEOF_EXPRESSION,
ParserRuleContext.UNARY_EXPRESSION, ParserRuleContext.IS_EXPRESSION };
private static final ParserRuleContext[] MAPPING_FIELD_START = { ParserRuleContext.MAPPING_FIELD_NAME,
ParserRuleContext.STRING_LITERAL, ParserRuleContext.COMPUTED_FIELD_NAME, ParserRuleContext.ELLIPSIS };
private static final ParserRuleContext[] SPECIFIC_FIELD_RHS =
{ ParserRuleContext.COLON, ParserRuleContext.COMMA, ParserRuleContext.CLOSE_PARENTHESIS };
private static final ParserRuleContext[] OPTIONAL_SERVICE_NAME =
{ ParserRuleContext.SERVICE_NAME, ParserRuleContext.ON_KEYWORD };
private static final ParserRuleContext[] RESOURCE_DEF_START =
{ ParserRuleContext.RESOURCE_KEYWORD, ParserRuleContext.FUNC_DEFINITION, ParserRuleContext.CLOSE_BRACE };
private static final ParserRuleContext[] CONST_DECL_RHS =
{ ParserRuleContext.STATEMENT_START_IDENTIFIER, ParserRuleContext.ASSIGN_OP };
private static final ParserRuleContext[] PARAMETER =
{ ParserRuleContext.ANNOTATIONS, ParserRuleContext.PUBLIC_KEYWORD, ParserRuleContext.TYPE_DESCRIPTOR };
private static final ParserRuleContext[] PARAMETER_WITHOUT_ANNOTS =
{ ParserRuleContext.PUBLIC_KEYWORD, ParserRuleContext.TYPE_DESCRIPTOR };
/**
* Limit for the distance to travel, to determine a successful lookahead.
*/
private int lookaheadLimit = 5;
public BallerinaParserErrorHandler(AbstractTokenReader tokenReader, BallerinaParser parser) {
this.tokenReader = tokenReader;
this.parser = parser;
this.errorListener = new BallerinaParserErrorListener();
}
public void startContext(ParserRuleContext context) {
this.ctxStack.push(context);
}
public void endContext() {
this.ctxStack.pop();
}
public void switchContext(ParserRuleContext context) {
this.ctxStack.pop();
this.ctxStack.push(context);
}
public void reportInvalidNode(STToken startingToken, String message) {
this.errorListener.reportInvalidNodeError(startingToken, message);
}
public void reportMissingTokenError(String message) {
STToken currentToken = this.tokenReader.head();
this.errorListener.reportMissingTokenError(currentToken, message);
}
private ParserRuleContext getParentContext() {
return this.ctxStack.peek();
}
/*
* -------------- Error recovering --------------
*/
/**
     * Recover from the current context. Returns the action that needs to be taken with respect
     * to the next token, in order to recover. This method searches for the most
     * optimal action, i.e., the one that allows the parser to proceed the farthest.
*
* @param nextToken Next token of the input where the error occurred
* @param currentCtx Current parser context
* @param args Arguments that requires to continue parsing from the given parser context
* @return The action needs to be taken for the next token, in order to recover
*/
public Solution recover(ParserRuleContext currentCtx, STToken nextToken, Object... args) {
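        // An EOF token can never be removed, so the only possible recovery is to insert the expected token.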
if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
SyntaxKind expectedTokenKind = getExpectedTokenKind(currentCtx);
Solution fix = new Solution(Action.INSERT, currentCtx, expectedTokenKind, currentCtx.toString());
applyFix(currentCtx, fix, args);
return fix;
}
Result bestMatch = seekMatch(currentCtx);
if (bestMatch.matches > 0) {
Solution sol = bestMatch.solution;
applyFix(currentCtx, sol, args);
return sol;
} else {
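            // No recovery path produced any matches: the next token cannot start the expected construct,
            // so remove it and resume parsing.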
removeInvalidToken();
Solution sol = new Solution(Action.REMOVE, currentCtx, nextToken.kind, nextToken.toString());
sol.recoveredNode = this.parser.resumeParsing(currentCtx, args);
return sol;
}
}
/**
* Remove the invalid token. This method assumes that the next immediate token
* of the token input stream is the culprit.
*/
public void removeInvalidToken() {
STToken invalidToken = this.tokenReader.read();
this.errorListener.reportInvalidToken(invalidToken);
}
/**
* Apply the fix to the current context.
*
* @param currentCtx Current context
* @param fix Fix to apply
* @param args Arguments that requires to continue parsing from the given parser context
*/
private void applyFix(ParserRuleContext currentCtx, Solution fix, Object... args) {
if (fix.action == Action.REMOVE) {
removeInvalidToken();
fix.recoveredNode = this.parser.resumeParsing(currentCtx, args);
} else {
fix.recoveredNode = handleMissingToken(currentCtx, fix);
}
}
/**
* Handle a missing token scenario.
*
* @param currentCtx Current context
* @param fix Solution to recover from the missing token
*/
private STNode handleMissingToken(ParserRuleContext currentCtx, Solution fix) {
if (!isProductionWithAlternatives(currentCtx)) {
reportMissingTokenError("missing " + fix.ctx);
}
return STNodeFactory.createMissingToken(fix.tokenKind);
}
/**
* Get a snapshot of the current context stack.
*
* @return Snapshot of the current context stack
*/
private ArrayDeque<ParserRuleContext> getCtxStackSnapshot() {
return this.ctxStack.clone();
}
private boolean isProductionWithAlternatives(ParserRuleContext currentCtx) {
switch (currentCtx) {
case TOP_LEVEL_NODE:
case TOP_LEVEL_NODE_WITHOUT_MODIFIER:
case TOP_LEVEL_NODE_WITHOUT_METADATA:
case STATEMENT:
case STATEMENT_WITHOUT_ANNOTS:
case FUNC_BODY:
case VAR_DECL_STMT_RHS:
case EXPRESSION_RHS:
case PARAMETER_RHS:
case ASSIGNMENT_OR_VAR_DECL_STMT:
case AFTER_PARAMETER_TYPE:
case FIELD_DESCRIPTOR_RHS:
case RECORD_BODY_START:
case RECORD_BODY_END:
case TYPE_DESCRIPTOR:
case NAMED_OR_POSITIONAL_ARG_RHS:
case OBJECT_FIELD_RHS:
case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:
case OBJECT_MEMBER:
case OBJECT_TYPE_FIRST_QUALIFIER:
case OBJECT_TYPE_SECOND_QUALIFIER:
case ELSE_BODY:
case IMPORT_DECL_RHS:
case IMPORT_SUB_VERSION:
case VERSION_NUMBER:
case IMPORT_VERSION_DECL:
case IMPORT_PREFIX_DECL:
case MAPPING_FIELD:
case SPECIFIC_FIELD_RHS:
case RESOURCE_DEF:
case PARAMETER_WITHOUT_ANNOTS:
case PARAMETER:
return true;
default:
return false;
}
}
/*
* seekMatch methods
*/
/**
     * Start a fresh search for a way to recover, using the next immediate token (peek(1)) and the current context.
*
* @param currentCtx Current parser context
* @return Recovery result
*/
private Result seekMatch(ParserRuleContext currentCtx) {
return seekMatchInSubTree(currentCtx, 1, 0);
}
/**
* Search for a solution in a sub-tree/sub-path. This will take a snapshot of the current context stack
* and will operate on top of it, so that the original state of the parser will not be disturbed. On return
* the previous state of the parser contexts will be restored.
*
* @param currentCtx Current context
* @param lookahead Position of the next token to consider, from the position of the original error.
* @param currentDepth Amount of distance traveled so far.
* @return Recovery result
*/
private Result seekMatchInSubTree(ParserRuleContext currentCtx, int lookahead, int currentDepth) {
ArrayDeque<ParserRuleContext> tempCtxStack = this.ctxStack;
this.ctxStack = getCtxStackSnapshot();
Result result = seekMatch(currentCtx, lookahead, currentDepth);
result.ctx = currentCtx;
this.ctxStack = tempCtxStack;
return result;
}
/**
     * TODO: This is a duplicate method. Same as the corresponding method in {@link BallerinaParser}.
     *
     * @param token Token to check
     * @return <code>true</code> if the token marks the end of a block, <code>false</code> otherwise
*/
private boolean isEndOfBlock(STToken token) {
ParserRuleContext enclosingContext = getParentContext();
switch (enclosingContext) {
case OBJECT_TYPE_DESCRIPTOR:
case SERVICE_DECL:
switch (token.kind) {
case CLOSE_BRACE_TOKEN:
case EOF_TOKEN:
case CLOSE_BRACE_PIPE_TOKEN:
case TYPE_KEYWORD:
return true;
default:
return false;
}
case BLOCK_STMT:
switch (token.kind) {
case CLOSE_BRACE_TOKEN:
case EOF_TOKEN:
case CLOSE_BRACE_PIPE_TOKEN:
case TYPE_KEYWORD:
case PUBLIC_KEYWORD:
case FUNCTION_KEYWORD:
case ELSE_KEYWORD:
return true;
default:
return false;
}
default:
switch (token.kind) {
case CLOSE_BRACE_TOKEN:
case EOF_TOKEN:
case CLOSE_BRACE_PIPE_TOKEN:
case TYPE_KEYWORD:
case PUBLIC_KEYWORD:
case FUNCTION_KEYWORD:
case SERVICE_KEYWORD:
case RESOURCE_KEYWORD:
return true;
default:
return false;
}
}
}
private boolean isEndOfObjectTypeNode(int nextLookahead) {
STToken nextToken = this.tokenReader.peek(nextLookahead);
switch (nextToken.kind) {
case CLOSE_BRACE_TOKEN:
case EOF_TOKEN:
case CLOSE_BRACE_PIPE_TOKEN:
case TYPE_KEYWORD:
case SERVICE_KEYWORD:
return true;
default:
STToken nextNextToken = this.tokenReader.peek(nextLookahead + 1);
switch (nextNextToken.kind) {
case CLOSE_BRACE_TOKEN:
case EOF_TOKEN:
case CLOSE_BRACE_PIPE_TOKEN:
case TYPE_KEYWORD:
case SERVICE_KEYWORD:
return true;
default:
return false;
}
}
}
private boolean isEndOfParametersList(STToken token) {
switch (token.kind) {
case OPEN_BRACE_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case CLOSE_BRACKET_TOKEN:
case SEMICOLON_TOKEN:
case PUBLIC_KEYWORD:
case FUNCTION_KEYWORD:
case EOF_TOKEN:
case RETURNS_KEYWORD:
return true;
default:
return false;
}
}
private boolean isEndOfParameter(STToken token) {
switch (token.kind) {
case OPEN_BRACE_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case CLOSE_BRACKET_TOKEN:
case SEMICOLON_TOKEN:
case COMMA_TOKEN:
case PUBLIC_KEYWORD:
case FUNCTION_KEYWORD:
case EOF_TOKEN:
case RETURNS_KEYWORD:
return true;
default:
return false;
}
}
/**
* Search for a solution.
     * Terminal rules are matched directly against the next token, while non-terminal rules with alternative
     * productions are resolved via seekInAlternativesPaths().
*
* @param currentCtx Current context
* @param lookahead Position of the next token to consider, relative to the position of the original error.
* @param currentDepth Amount of distance traveled so far.
* @return Recovery result
*/
private Result seekMatch(ParserRuleContext currentCtx, int lookahead, int currentDepth) {
boolean hasMatch;
boolean skipRule;
int matchingRulesCount = 0;
boolean isEntryPoint = true;
while (currentDepth < lookaheadLimit) {
hasMatch = true;
skipRule = false;
STToken nextToken = this.tokenReader.peek(lookahead);
switch (currentCtx) {
case EOF:
hasMatch = nextToken.kind == SyntaxKind.EOF_TOKEN;
break;
case PUBLIC_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.PUBLIC_KEYWORD;
break;
case PRIVATE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.PRIVATE_KEYWORD;
break;
case REMOTE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.REMOTE_KEYWORD;
break;
case TOP_LEVEL_NODE:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, TOP_LEVEL_NODE);
case TOP_LEVEL_NODE_WITHOUT_MODIFIER:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
TOP_LEVEL_NODE_WITHOUT_MODIFIER);
case TOP_LEVEL_NODE_WITHOUT_METADATA:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
TOP_LEVEL_NODE_WITHOUT_METADATA);
case FUNCTION_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.FUNCTION_KEYWORD;
break;
case FUNC_NAME:
case VARIABLE_NAME:
case TYPE_NAME:
case FIELD_OR_FUNC_NAME:
case IMPORT_ORG_OR_MODULE_NAME:
case IMPORT_MODULE_NAME:
case IMPORT_PREFIX:
case MAPPING_FIELD_NAME:
case SERVICE_NAME:
case QUALIFIED_IDENTIFIER:
case IDENTIFIER:
hasMatch = nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN;
break;
case OPEN_PARENTHESIS:
hasMatch = nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN;
break;
case CLOSE_PARENTHESIS:
hasMatch = nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN;
break;
case RETURNS_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.RETURNS_KEYWORD;
if (!hasMatch) {
skipRule = true;
}
break;
case SIMPLE_TYPE_DESCRIPTOR:
case CONST_DECL_TYPE:
hasMatch =
nextToken.kind == SyntaxKind.SIMPLE_TYPE || nextToken.kind == SyntaxKind.SERVICE_KEYWORD ||
nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN;
break;
case FUNC_BODY:
return seekInFuncBodies(lookahead, currentDepth, matchingRulesCount);
case OPEN_BRACE:
hasMatch = nextToken.kind == SyntaxKind.OPEN_BRACE_TOKEN;
break;
case CLOSE_BRACE:
hasMatch = nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN;
break;
case ASSIGN_OP:
hasMatch = nextToken.kind == SyntaxKind.EQUAL_TOKEN;
break;
case EXTERNAL_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.EXTERNAL_KEYWORD;
break;
case SEMICOLON:
hasMatch = nextToken.kind == SyntaxKind.SEMICOLON_TOKEN;
break;
case STATEMENT:
case STATEMENT_WITHOUT_ANNOTS:
if (isEndOfBlock(nextToken)) {
skipRule = true;
break;
}
return seekInStatements(currentCtx, nextToken, lookahead, currentDepth, matchingRulesCount);
case BINARY_OPERATOR:
hasMatch = isBinaryOperator(nextToken);
break;
case EXPRESSION:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, EXPRESSIONS);
case VAR_DECL_STMT_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, VAR_DECL_RHS);
case EXPRESSION_RHS:
return seekMatchInExpressionRhs(nextToken, lookahead, currentDepth, matchingRulesCount);
case COMMA:
hasMatch = nextToken.kind == SyntaxKind.COMMA_TOKEN;
break;
case PARAM_LIST:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, PARAM_LIST);
case PARAMETER_RHS:
ParserRuleContext parentCtx = getParentContext();
switch (parentCtx) {
case REQUIRED_PARAM:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, PARAMETER_RHS);
case DEFAULTABLE_PARAM:
case REST_PARAM:
skipRule = true;
break;
default:
throw new IllegalStateException();
}
break;
case STATEMENT_START_IDENTIFIER:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, TYPE_OR_VAR_NAME);
case ASSIGNMENT_OR_VAR_DECL_STMT_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
ASSIGNMENT_OR_VAR_DECL_SECOND_TOKEN);
case CLOSED_RECORD_BODY_END:
hasMatch = nextToken.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN;
break;
case CLOSED_RECORD_BODY_START:
hasMatch = nextToken.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN;
break;
case ELLIPSIS:
hasMatch = nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN;
break;
case QUESTION_MARK:
hasMatch = nextToken.kind == SyntaxKind.QUESTION_MARK_TOKEN;
break;
case RECORD_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.RECORD_KEYWORD;
break;
case TYPE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.TYPE_KEYWORD;
break;
case FIELD_DESCRIPTOR_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, FIELD_DESCRIPTOR_RHS);
case FIELD_OR_REST_DESCIPTOR_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
FIELD_OR_REST_DESCIPTOR_RHS);
case RECORD_BODY_END:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, RECORD_BODY_END);
case RECORD_BODY_START:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, RECORD_BODY_START);
case TYPE_DESCRIPTOR:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, TYPE_DESCRIPTORS);
case RECORD_FIELD:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, RECORD_FIELD);
case RECORD_FIELD_WITHOUT_METADATA:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
RECORD_FIELD_WITHOUT_METADATA);
case ARG:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, ARG_START);
case NAMED_OR_POSITIONAL_ARG_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
NAMED_OR_POSITIONAL_ARG_RHS);
case OBJECT_MEMBER:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, OBJECT_MEMBER_START);
case OBJECT_MEMBER_WITHOUT_METADATA:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
OBJECT_MEMBER_WITHOUT_METADATA);
case OBJECT_FIELD_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, OBJECT_FIELD_RHS);
case OBJECT_METHOD_START:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, OBJECT_METHOD_START);
case OBJECT_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.OBJECT_KEYWORD;
break;
case OBJECT_FUNC_OR_FIELD:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, OBJECT_FUNC_OR_FIELD);
case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY);
case OBJECT_TYPE_DESCRIPTOR_START:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
OBJECT_TYPE_DESCRIPTOR_START);
case OBJECT_TYPE_FIRST_QUALIFIER:
case OBJECT_TYPE_SECOND_QUALIFIER:
if (currentDepth == 0) {
hasMatch = false;
break;
}
hasMatch = nextToken.kind == SyntaxKind.ABSTRACT_KEYWORD ||
nextToken.kind == SyntaxKind.CLIENT_KEYWORD;
break;
case ABSTRACT_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.ABSTRACT_KEYWORD;
break;
case CLIENT_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.CLIENT_KEYWORD;
break;
case OPEN_BRACKET:
hasMatch = nextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN;
break;
case CLOSE_BRACKET:
hasMatch = nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN;
break;
case DOT:
hasMatch = nextToken.kind == SyntaxKind.DOT_TOKEN;
break;
case IF_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.IF_KEYWORD;
break;
case ELSE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.ELSE_KEYWORD;
break;
case ELSE_BLOCK:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, ELSE_BLOCK);
case ELSE_BODY:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, ELSE_BODY);
case WHILE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.WHILE_KEYWORD;
break;
case CHECKING_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.CHECK_KEYWORD ||
nextToken.kind == SyntaxKind.CHECKPANIC_KEYWORD;
break;
case CALL_STMT_START:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, CALL_STATEMENT);
case PANIC_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.PANIC_KEYWORD;
break;
case AS_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.AS_KEYWORD;
break;
case BOOLEAN_LITERAL:
hasMatch = nextToken.kind == SyntaxKind.TRUE_KEYWORD || nextToken.kind == SyntaxKind.FALSE_KEYWORD;
break;
case DECIMAL_INTEGER_LITERAL:
case MAJOR_VERSION:
case MINOR_VERSION:
case PATCH_VERSION:
hasMatch = nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL;
break;
case IMPORT_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.IMPORT_KEYWORD;
break;
case SLASH:
hasMatch = nextToken.kind == SyntaxKind.SLASH_TOKEN;
break;
case VERSION_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.VERSION_KEYWORD;
break;
case CONTINUE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.CONTINUE_KEYWORD;
break;
case BREAK_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.BREAK_KEYWORD;
break;
case IMPORT_PREFIX_DECL:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, IMPORT_PREFIX_DECL);
case IMPORT_VERSION_DECL:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, IMPORT_VERSION);
case IMPORT_DECL_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, IMPORT_DECL_RHS);
case AFTER_IMPORT_MODULE_NAME:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
AFTER_IMPORT_MODULE_NAME);
case MAJOR_MINOR_VERSION_END:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
MAJOR_MINOR_VERSION_END);
case RETURN_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.RETURN_KEYWORD;
break;
case RETURN_STMT_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, RETURN_RHS);
case ACCESS_EXPRESSION:
return seekInAccessExpression(currentCtx, lookahead, currentDepth, matchingRulesCount);
case BASIC_LITERAL:
hasMatch = isBasicLiteral(nextToken.kind);
break;
case COLON:
hasMatch = nextToken.kind == SyntaxKind.COLON_TOKEN;
break;
case STRING_LITERAL:
hasMatch = nextToken.kind == SyntaxKind.STRING_LITERAL;
break;
case MAPPING_FIELD:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, MAPPING_FIELD_START);
case SPECIFIC_FIELD_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, SPECIFIC_FIELD_RHS);
case SERVICE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.SERVICE_KEYWORD;
break;
case ON_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.ON_KEYWORD;
break;
case OPTIONAL_SERVICE_NAME:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, OPTIONAL_SERVICE_NAME);
case RESOURCE_DEF:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, RESOURCE_DEF_START);
case RESOURCE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.RESOURCE_KEYWORD;
break;
case LISTENER_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.LISTENER_KEYWORD;
break;
case CONST_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.CONST_KEYWORD;
break;
case FINAL_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.FINAL_KEYWORD;
break;
case CONST_DECL_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, CONST_DECL_RHS);
case TYPEOF_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.TYPEOF_KEYWORD;
break;
case UNARY_OPERATOR:
hasMatch = isUnaryOperator(nextToken);
break;
case AT:
hasMatch = nextToken.kind == SyntaxKind.AT_TOKEN;
break;
case PARAMETER:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, PARAMETER);
case PARAMETER_WITHOUT_ANNOTS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
PARAMETER_WITHOUT_ANNOTS);
case IS_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.IS_KEYWORD;
break;
case IS_EXPRESSION:
return seekInIsExpression(currentCtx, lookahead, currentDepth, matchingRulesCount);
case COMP_UNIT:
case FUNC_DEFINITION:
case RETURN_TYPE_DESCRIPTOR:
case EXTERNAL_FUNC_BODY:
case FUNC_BODY_BLOCK:
case ASSIGNMENT_STMT:
case VAR_DECL_STMT:
case REQUIRED_PARAM:
case AFTER_PARAMETER_TYPE:
case DEFAULTABLE_PARAM:
case REST_PARAM:
case MODULE_TYPE_DEFINITION:
case ARG_LIST:
case ASTERISK:
case FUNC_CALL:
case RECORD_TYPE_DESCRIPTOR:
case OBJECT_TYPE_DESCRIPTOR:
case ASSIGNMENT_OR_VAR_DECL_STMT:
case CALL_STMT:
case IF_BLOCK:
case BLOCK_STMT:
case WHILE_BLOCK:
case VERSION_NUMBER:
case IMPORT_DECL:
case IMPORT_SUB_VERSION:
case MAPPING_CONSTRUCTOR:
case PANIC_STMT:
case COMPUTED_FIELD_NAME:
case RETURN_STMT:
case LISTENERS_LIST:
case SERVICE_DECL:
case BREAK_STATEMENT:
case CONTINUE_STATEMENT:
case LISTENER_DECL:
case CONSTANT_DECL:
case NIL_TYPE_DESCRIPTOR:
case OPTIONAL_TYPE_DESCRIPTOR:
case ANNOTATIONS:
case DOC_STRING:
case VARIABLE_REF:
case TYPE_REFERENCE:
case ANNOT_REFERENCE:
default:
skipRule = true;
hasMatch = true;
break;
}
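            // The expected rule did not match the next token: try fixing here (insert or remove a token)
            // and see how far each fixed path can proceed.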
if (!hasMatch) {
Result fixedPathResult = fixAndContinue(currentCtx, lookahead, currentDepth + 1);
if (isEntryPoint) {
fixedPathResult.solution = fixedPathResult.fixes.peek();
} else {
fixedPathResult.solution = new Solution(Action.KEEP, currentCtx, getExpectedTokenKind(currentCtx),
currentCtx.toString());
}
return getFinalResult(matchingRulesCount, fixedPathResult);
}
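            // Match found (or rule skipped): advance to the next rule; a token is consumed only when the
            // rule actually matched.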
currentCtx = getNextRule(currentCtx, lookahead + 1);
if (!skipRule) {
currentDepth++;
matchingRulesCount++;
lookahead++;
isEntryPoint = false;
}
}
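        // Reached the lookahead limit without needing a fix: keep the current token sequence as-is.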
Result result = new Result(new ArrayDeque<>(), matchingRulesCount, currentCtx);
result.solution =
new Solution(Action.KEEP, currentCtx, getExpectedTokenKind(currentCtx), currentCtx.toString());
return result;
}
/**
     * Search for matching token sequences within the possible function bodies and return the most optimal solution.
     * This will check whether the token stream best matches a 'function-body-block' or an 'external-function-body'.
*
* @param lookahead Position of the next token to consider, relative to the position of the original error
* @param currentDepth Amount of distance traveled so far
* @param currentMatches Matching tokens found so far
     * @return Recovery result
*/
private Result seekInFuncBodies(int lookahead, int currentDepth, int currentMatches) {
return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, FUNC_BODIES);
}
/**
* Search for matching token sequences within different kinds of statements and returns the most optimal solution.
*
* @param currentCtx Current context
* @param nextToken Next token in the token stream
* @param lookahead Position of the next token to consider, relative to the position of the original error
* @param currentDepth Amount of distance traveled so far
* @param currentMatches Matching tokens found so far
     * @return Recovery result
*/
private Result seekInStatements(ParserRuleContext currentCtx, STToken nextToken, int lookahead, int currentDepth,
int currentMatches) {
if (nextToken.kind == SyntaxKind.SEMICOLON_TOKEN) {
Result result = seekMatchInSubTree(ParserRuleContext.STATEMENT, lookahead + 1, currentDepth);
result.fixes.push(new Solution(Action.REMOVE, currentCtx, nextToken.kind, nextToken.toString()));
return getFinalResult(currentMatches, result);
}
return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, STATEMENTS);
}
/**
* Search for matching token sequences within access expressions and returns the most optimal solution.
     * An access expression can be one of: method call, field access, or member access.
*
* @param currentCtx Current context
* @param lookahead Position of the next token to consider, relative to the position of the original error
* @param currentDepth Amount of distance traveled so far
* @param currentMatches Matching tokens found so far
     * @return Recovery result
*/
private Result seekInAccessExpression(ParserRuleContext currentCtx, int lookahead, int currentDepth,
int currentMatches) {
STToken nextToken = this.tokenReader.peek(lookahead);
currentDepth++;
if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
Result fixedPathResult = fixAndContinue(currentCtx, lookahead, currentDepth);
return getFinalResult(currentMatches, fixedPathResult);
}
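        // The identifier matched: decide what follows it based on the token after it
        // (function call, field access, member access, or the end of the expression).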
ParserRuleContext nextContext;
STToken nextNextToken = this.tokenReader.peek(lookahead + 1);
switch (nextNextToken.kind) {
case OPEN_PAREN_TOKEN:
nextContext = ParserRuleContext.OPEN_PARENTHESIS;
break;
case DOT_TOKEN:
nextContext = ParserRuleContext.DOT;
break;
case OPEN_BRACKET_TOKEN:
nextContext = ParserRuleContext.OPEN_BRACKET;
break;
default:
nextContext = ParserRuleContext.EXPRESSION_RHS;
break;
}
currentMatches++;
lookahead++;
Result result = seekMatch(nextContext, lookahead, currentDepth);
result.ctx = currentCtx;
return getFinalResult(currentMatches, result);
}
/**
     * Search for a match in the RHS of an expression. The RHS of an expression can be the end
     * of the expression or the RHS of a binary expression.
     *
     * @param nextToken Next token in the token stream
     * @param lookahead Position of the next token to consider, relative to the position of the original error
* @param currentDepth Amount of distance traveled so far
* @param currentMatches Matching tokens found so far
* @return Recovery result
*/
private Result seekMatchInExpressionRhs(STToken nextToken, int lookahead, int currentDepth, int currentMatches) {
ParserRuleContext parentCtx = getParentContext();
if (isParameter(parentCtx) || parentCtx == ParserRuleContext.ARG) {
ParserRuleContext[] next = { ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT,
ParserRuleContext.OPEN_BRACKET, ParserRuleContext.COMMA, ParserRuleContext.CLOSE_PARENTHESIS };
return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next);
}
if (parentCtx == ParserRuleContext.MAPPING_CONSTRUCTOR) {
ParserRuleContext[] next = { ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT,
ParserRuleContext.OPEN_BRACKET, ParserRuleContext.COMMA, ParserRuleContext.CLOSE_BRACE };
return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next);
}
if (parentCtx == ParserRuleContext.COMPUTED_FIELD_NAME) {
ParserRuleContext[] next = { ParserRuleContext.CLOSE_BRACKET, ParserRuleContext.BINARY_OPERATOR,
ParserRuleContext.DOT, ParserRuleContext.OPEN_BRACKET };
return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next);
}
if (parentCtx == ParserRuleContext.LISTENERS_LIST) {
ParserRuleContext[] next = { ParserRuleContext.COMMA, ParserRuleContext.BINARY_OPERATOR,
ParserRuleContext.DOT, ParserRuleContext.OPEN_BRACKET, ParserRuleContext.OPEN_BRACE };
return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next);
}
ParserRuleContext nextContext;
if (parentCtx == ParserRuleContext.IF_BLOCK || parentCtx == ParserRuleContext.WHILE_BLOCK) {
nextContext = ParserRuleContext.BLOCK_STMT;
} else if (isStatement(parentCtx) || parentCtx == ParserRuleContext.RECORD_FIELD ||
parentCtx == ParserRuleContext.OBJECT_MEMBER || parentCtx == ParserRuleContext.LISTENER_DECL ||
parentCtx == ParserRuleContext.CONSTANT_DECL) {
nextContext = ParserRuleContext.SEMICOLON;
} else if (parentCtx == ParserRuleContext.ANNOTATIONS) {
nextContext = ParserRuleContext.TOP_LEVEL_NODE;
} else {
throw new IllegalStateException();
}
ParserRuleContext[] alternatives = { ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT,
ParserRuleContext.OPEN_BRACKET, ParserRuleContext.OPEN_PARENTHESIS, ParserRuleContext.IS_KEYWORD,
nextContext };
return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, alternatives);
}
/**
* Search for matching token sequences within the given alternative paths, and find the most optimal solution.
*
* @param lookahead Position of the next token to consider, relative to the position of the original error
* @param currentDepth Amount of distance traveled so far
* @param currentMatches Matching tokens found so far
     * @param alternativeRules Alternative rules to attempt
     * @return Recovery result
*/
private Result seekInAlternativesPaths(int lookahead, int currentDepth, int currentMatches,
ParserRuleContext[] alternativeRules) {
@SuppressWarnings("unchecked")
List<Result>[] results = new List[lookaheadLimit];
int bestMatchIndex = 0;
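        // Evaluate every alternative rule and bucket the results by the number of tokens they matched.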
for (ParserRuleContext rule : alternativeRules) {
Result result = seekMatchInSubTree(rule, lookahead, currentDepth);
            List<Result> similarResults = results[result.matches];
            if (similarResults == null) {
                similarResults = new ArrayList<>(lookaheadLimit);
                results[result.matches] = similarResults;
                if (bestMatchIndex < result.matches) {
                    bestMatchIndex = result.matches;
                }
            }
            similarResults.add(result);
}
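        // None of the alternatives matched a single token: fall back to an empty result on the first alternative.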
if (bestMatchIndex == 0) {
return new Result(new ArrayDeque<>(), currentMatches, alternativeRules[0]);
}
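        // Among the results with the most matches, pick the one with the fewest fixes;
        // on a tie, prefer inserting a token over removing one.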
List<Result> bestMatches = results[bestMatchIndex];
Result bestMatch = bestMatches.get(0);
Result currentMatch;
for (int i = 1; i < bestMatches.size(); i++) {
currentMatch = bestMatches.get(i);
int currentMatchFixesSize = currentMatch.fixes.size();
int bestmatchFixesSize = bestMatch.fixes.size();
if (currentMatchFixesSize == bestmatchFixesSize) {
if (bestmatchFixesSize == 0) {
continue;
}
Solution currentSol = bestMatch.fixes.peek();
Solution foundSol = currentMatch.fixes.peek();
if (currentSol.action == Action.REMOVE && foundSol.action == Action.INSERT) {
bestMatch = currentMatch;
}
}
if (currentMatchFixesSize < bestmatchFixesSize) {
bestMatch = currentMatch;
}
}
return getFinalResult(currentMatches, bestMatch);
}
/**
* Combine a given result with the current results, and get the final result.
*
* @param currentMatches Matches found so far
     * @param bestMatch Result found in the sub-tree, which needs to be merged with the current results
* @return Final result
*/
private Result getFinalResult(int currentMatches, Result bestMatch) {
bestMatch.matches += currentMatches;
return bestMatch;
}
/**
* <p>
     * Fix the error at the current position and continue forward to find the best path. This method
     * tries to fix the parser error using the following steps:
* <ol>
* <li>
* Insert a token and see how far the parser can proceed.
* </li>
* <li>
* Delete a token and see how far the parser can proceed.
* </li>
* </ol>
*
     * Then it decides the best action to perform (whether to insert or remove a token), using the results
     * of the above two steps, based on the following criteria:
* <ol>
* <li>
* Pick the solution with the longest matching sequence.
* </li>
* <li>
* If there's a tie, then check for the solution which requires the lowest number of 'fixes'.
* </li>
* <li>
     * If there's still a tie, then give priority to 'insertion', as that does not require removing
     * input the user has given.
* </li>
* </ol>
* </p>
*
* @param currentCtx Current parser context
* @param lookahead Position of the next token to consider, relative to the position of the original error
* @param currentDepth Amount of distance traveled so far
* @return Recovery result
*/
private Result fixAndContinue(ParserRuleContext currentCtx, int lookahead, int currentDepth) {
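        // Option 1: treat the next token as extraneous and skip it (deletion).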
Result deletionResult = seekMatchInSubTree(currentCtx, lookahead + 1, currentDepth);
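        // Option 2: assume the expected token is missing and continue from the rule that would follow it (insertion).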
ParserRuleContext nextCtx = getNextRule(currentCtx, lookahead);
Result insertionResult = seekMatchInSubTree(nextCtx, lookahead, currentDepth);
Result fixedPathResult;
Solution action;
if (insertionResult.matches == 0 && deletionResult.matches == 0) {
fixedPathResult = insertionResult;
} else if (insertionResult.matches == deletionResult.matches) {
if (insertionResult.fixes.size() <= deletionResult.fixes.size()) {
action = new Solution(Action.INSERT, currentCtx, getExpectedTokenKind(currentCtx),
currentCtx.toString());
insertionResult.fixes.push(action);
fixedPathResult = insertionResult;
} else {
STToken token = this.tokenReader.peek(lookahead);
action = new Solution(Action.REMOVE, currentCtx, token.kind, token.toString());
deletionResult.fixes.push(action);
fixedPathResult = deletionResult;
}
} else if (insertionResult.matches > deletionResult.matches) {
action = new Solution(Action.INSERT, currentCtx, getExpectedTokenKind(currentCtx), currentCtx.toString());
insertionResult.fixes.push(action);
fixedPathResult = insertionResult;
} else {
STToken token = this.tokenReader.peek(lookahead);
action = new Solution(Action.REMOVE, currentCtx, token.kind, token.toString());
deletionResult.fixes.push(action);
fixedPathResult = deletionResult;
}
return fixedPathResult;
}
/**
* Get the next parser rule/context given the current parser context.
*
* @param currentCtx Current parser context
* @param nextLookahead Position of the next token to consider, relative to the position of the original error
* @return Next parser context
*/
/**
     * Get the next parser context to visit after a {@link ParserRuleContext#AFTER_PARAMETER_TYPE}.
*
* @return Next parser context
*/
private ParserRuleContext getNextRuleForParamType() {
ParserRuleContext parentCtx;
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.REQUIRED_PARAM || parentCtx == ParserRuleContext.DEFAULTABLE_PARAM) {
return ParserRuleContext.VARIABLE_NAME;
} else if (parentCtx == ParserRuleContext.REST_PARAM) {
return ParserRuleContext.ELLIPSIS;
} else {
throw new IllegalStateException();
}
}
/**
     * Get the next parser context to visit after a {@link ParserRuleContext#COMMA}.
*
* @return Next parser context
*/
private ParserRuleContext getNextRuleForComma() {
ParserRuleContext parentCtx = getParentContext();
switch (parentCtx) {
case PARAM_LIST:
case REQUIRED_PARAM:
case DEFAULTABLE_PARAM:
case REST_PARAM:
endContext();
return parentCtx;
case ARG:
return parentCtx;
case MAPPING_CONSTRUCTOR:
return ParserRuleContext.MAPPING_FIELD;
case LISTENERS_LIST:
return ParserRuleContext.EXPRESSION;
default:
throw new IllegalStateException();
}
}
/**
* Get the next parser context to visit after a type descriptor.
*
* @return Next parser context
*/
private ParserRuleContext getNextRuleForTypeDescriptor() {
ParserRuleContext parentCtx = getParentContext();
switch (parentCtx) {
case RECORD_FIELD:
case OBJECT_MEMBER:
case LISTENER_DECL:
case CONSTANT_DECL:
return ParserRuleContext.VARIABLE_NAME;
case MODULE_TYPE_DEFINITION:
return ParserRuleContext.SEMICOLON;
case RETURN_TYPE_DESCRIPTOR:
return ParserRuleContext.FUNC_BODY;
case OPTIONAL_TYPE_DESCRIPTOR:
return ParserRuleContext.QUESTION_MARK;
case IS_EXPRESSION:
endContext();
return ParserRuleContext.EXPRESSION_RHS;
default:
if (isStatement(parentCtx) || isParameter(parentCtx)) {
return ParserRuleContext.VARIABLE_NAME;
}
}
throw new IllegalStateException();
}
/**
     * Get the next parser context to visit after a {@link ParserRuleContext#ASSIGN_OP}.
*
* @return Next parser context
*/
private ParserRuleContext getNextRuleForEqualOp() {
ParserRuleContext parentCtx = getParentContext();
switch (parentCtx) {
case EXTERNAL_FUNC_BODY:
return ParserRuleContext.EXTERNAL_KEYWORD;
case REQUIRED_PARAM:
case DEFAULTABLE_PARAM:
case RECORD_FIELD:
case ARG:
case OBJECT_MEMBER:
case LISTENER_DECL:
case CONSTANT_DECL:
return ParserRuleContext.EXPRESSION;
default:
if (isStatement(parentCtx)) {
return ParserRuleContext.EXPRESSION;
}
throw new IllegalStateException();
}
}
/**
     * Get the next parser context to visit after a {@link ParserRuleContext#CLOSE_BRACE}.
*
* @param nextLookahead Position of the next token to consider, relative to the position of the original error
* @return Next parser context
*/
private ParserRuleContext getNextRuleForCloseBrace(int nextLookahead) {
ParserRuleContext parentCtx = getParentContext();
switch (parentCtx) {
case FUNC_BODY_BLOCK:
endContext();
endContext();
STToken nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
return ParserRuleContext.EOF;
}
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.SERVICE_DECL) {
return ParserRuleContext.RESOURCE_DEF;
} else if (parentCtx == ParserRuleContext.OBJECT_TYPE_DESCRIPTOR) {
return ParserRuleContext.OBJECT_MEMBER;
} else {
return ParserRuleContext.TOP_LEVEL_NODE;
}
case SERVICE_DECL:
endContext();
nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
return ParserRuleContext.EOF;
}
return ParserRuleContext.TOP_LEVEL_NODE;
case OBJECT_MEMBER:
endContext();
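                // Falls through: ending the object members also ends the enclosing object/record type descriptor.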
case RECORD_TYPE_DESCRIPTOR:
case OBJECT_TYPE_DESCRIPTOR:
endContext();
return getNextRuleForTypeDescriptor();
case BLOCK_STMT:
endContext();
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.IF_BLOCK) {
endContext();
return ParserRuleContext.ELSE_BLOCK;
} else if (parentCtx == ParserRuleContext.WHILE_BLOCK) {
endContext();
return ParserRuleContext.STATEMENT;
}
return ParserRuleContext.STATEMENT;
case MAPPING_CONSTRUCTOR:
endContext();
parentCtx = getParentContext();
if (parentCtx != ParserRuleContext.ANNOTATIONS) {
return ParserRuleContext.EXPRESSION_RHS;
}
nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.AT_TOKEN) {
return ParserRuleContext.AT;
}
endContext();
parentCtx = getParentContext();
switch (parentCtx) {
case COMP_UNIT:
return ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA;
case RETURN_TYPE_DESCRIPTOR:
return ParserRuleContext.TYPE_DESCRIPTOR;
case RECORD_FIELD:
return ParserRuleContext.RECORD_FIELD_WITHOUT_METADATA;
case OBJECT_MEMBER:
return ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA;
case SERVICE_DECL:
return ParserRuleContext.RESOURCE_DEF;
case FUNC_BODY_BLOCK:
return ParserRuleContext.STATEMENT_WITHOUT_ANNOTS;
case EXTERNAL_FUNC_BODY:
return ParserRuleContext.EXTERNAL_KEYWORD;
default:
if (isParameter(parentCtx)) {
return ParserRuleContext.REQUIRED_PARAM;
}
throw new IllegalStateException("annotation is ending inside a " + parentCtx);
}
default:
throw new IllegalStateException("found close-brace in: " + parentCtx);
}
}
/**
* Get the next parser context to visit after a variable/parameter name.
*
* @param nextLookahead Position of the next token to consider, relative to the position of the original error
* @return Next parser context
*/
private ParserRuleContext getNextRuleForVarName(int nextLookahead) {
STToken nextToken = this.tokenReader.peek(nextLookahead);
ParserRuleContext parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.REQUIRED_PARAM) {
if (isEndOfParametersList(nextToken)) {
return ParserRuleContext.CLOSE_PARENTHESIS;
} else if (isEndOfParameter(nextToken)) {
return ParserRuleContext.COMMA;
} else {
switchContext(ParserRuleContext.DEFAULTABLE_PARAM);
if (isCompoundBinaryOperator(nextToken.kind)) {
return ParserRuleContext.COMPOUND_BINARY_OPERATOR;
} else {
return ParserRuleContext.ASSIGN_OP;
}
}
} else if (parentCtx == ParserRuleContext.DEFAULTABLE_PARAM) {
if (isEndOfParametersList(nextToken)) {
return ParserRuleContext.CLOSE_PARENTHESIS;
} else {
return ParserRuleContext.ASSIGN_OP;
}
} else if (isStatement(parentCtx) || parentCtx == ParserRuleContext.LISTENER_DECL ||
parentCtx == ParserRuleContext.CONSTANT_DECL) {
return ParserRuleContext.VAR_DECL_STMT_RHS;
} else if (parentCtx == ParserRuleContext.RECORD_FIELD) {
return ParserRuleContext.FIELD_DESCRIPTOR_RHS;
} else if (parentCtx == ParserRuleContext.ARG) {
return ParserRuleContext.NAMED_OR_POSITIONAL_ARG_RHS;
} else if (parentCtx == ParserRuleContext.OBJECT_MEMBER) {
return ParserRuleContext.OBJECT_FIELD_RHS;
} else {
throw new IllegalStateException();
}
}
/**
* Check whether the given token kind is a compound binary operator.
*
* @param kind STToken kind
* @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise
*/
private boolean isCompoundBinaryOperator(SyntaxKind kind) {
switch (kind) {
case PLUS_TOKEN:
case MINUS_TOKEN:
case SLASH_TOKEN:
case ASTERISK_TOKEN:
return true;
default:
return false;
}
}
/**
* Get the next parser context to visit after a {@link ParserRuleContext#SEMICOLON}.
*
* @param nextLookahead Position of the next token to consider, relative to the position of the original error
* @return Next parser context
*/
private ParserRuleContext getNextRuleForSemicolon(int nextLookahead) {
STToken nextToken;
ParserRuleContext parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.EXTERNAL_FUNC_BODY) {
endContext();
endContext();
nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
return ParserRuleContext.EOF;
}
return ParserRuleContext.TOP_LEVEL_NODE;
} else if (isExpression(parentCtx)) {
endContext();
if (isEndOfBlock(this.tokenReader.peek(nextLookahead))) {
return ParserRuleContext.CLOSE_BRACE;
}
return ParserRuleContext.STATEMENT;
} else if (parentCtx == ParserRuleContext.VAR_DECL_STMT) {
endContext();
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.COMP_UNIT) {
return ParserRuleContext.TOP_LEVEL_NODE;
}
return ParserRuleContext.STATEMENT;
} else if (isStatement(parentCtx)) {
endContext();
if (isEndOfBlock(this.tokenReader.peek(nextLookahead))) {
return ParserRuleContext.CLOSE_BRACE;
}
return ParserRuleContext.STATEMENT;
} else if (parentCtx == ParserRuleContext.RECORD_FIELD) {
if (isEndOfBlock(this.tokenReader.peek(nextLookahead))) {
endContext();
return ParserRuleContext.RECORD_BODY_END;
}
return ParserRuleContext.RECORD_FIELD;
} else if (parentCtx == ParserRuleContext.MODULE_TYPE_DEFINITION ||
parentCtx == ParserRuleContext.LISTENER_DECL || parentCtx == ParserRuleContext.CONSTANT_DECL) {
endContext();
nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
return ParserRuleContext.EOF;
}
return ParserRuleContext.TOP_LEVEL_NODE;
} else if (parentCtx == ParserRuleContext.OBJECT_MEMBER) {
if (isEndOfObjectTypeNode(nextLookahead)) {
endContext();
return ParserRuleContext.CLOSE_BRACE;
}
return ParserRuleContext.OBJECT_MEMBER;
} else if (parentCtx == ParserRuleContext.IMPORT_DECL) {
endContext();
nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
return ParserRuleContext.EOF;
}
return ParserRuleContext.TOP_LEVEL_NODE;
} else {
throw new IllegalStateException();
}
}
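/**
 * Get the next parser context to visit after a dot.
 *
 * @return Next parser context
 */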
private ParserRuleContext getNextRuleForDot() {
ParserRuleContext parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.IMPORT_DECL) {
return ParserRuleContext.IMPORT_MODULE_NAME;
}
return ParserRuleContext.FIELD_OR_FUNC_NAME;
}
/**
* Get the next parser context to visit after a {@link ParserRuleContext#QUESTION_MARK}.
*
* @return Next parser context
*/
private ParserRuleContext getNextRuleForQuestionMark() {
ParserRuleContext parentCtx = getParentContext();
switch (parentCtx) {
case OPTIONAL_TYPE_DESCRIPTOR:
endContext();
parentCtx = getParentContext();
switch (parentCtx) {
case MODULE_TYPE_DEFINITION:
return ParserRuleContext.SEMICOLON;
case RETURN_TYPE_DESCRIPTOR:
return ParserRuleContext.FUNC_BODY;
default:
return ParserRuleContext.VARIABLE_NAME;
}
default:
return ParserRuleContext.SEMICOLON;
}
}
/**
* Check whether the given context is a statement.
*
* @param parentCtx Parser context to check
* @return <code>true</code> if the given context is a statement. <code>false</code> otherwise
*/
private boolean isStatement(ParserRuleContext parentCtx) {
switch (parentCtx) {
case STATEMENT:
case STATEMENT_WITHOUT_ANNOTS:
case VAR_DECL_STMT:
case ASSIGNMENT_STMT:
case ASSIGNMENT_OR_VAR_DECL_STMT:
case IF_BLOCK:
case BLOCK_STMT:
case WHILE_BLOCK:
case CALL_STMT:
case PANIC_STMT:
case CONTINUE_STATEMENT:
case BREAK_STATEMENT:
case RETURN_STMT:
case COMPOUND_ASSIGNMENT_STMT:
return true;
default:
return false;
}
}
/**
* Check whether the given context is an expression.
*
* @param ctx Parser context to check
* @return <code>true</code> if the given context is an expression. <code>false</code> otherwise
*/
private boolean isExpression(ParserRuleContext ctx) {
return ctx == ParserRuleContext.EXPRESSION;
}
/**
* Check whether the given token refers to a binary operator.
*
* @param token Token to check
* @return <code>true</code> if the given token refers to a binary operator. <code>false</code> otherwise
*/
private boolean isBinaryOperator(STToken token) {
switch (token.kind) {
case PLUS_TOKEN:
case MINUS_TOKEN:
case SLASH_TOKEN:
case ASTERISK_TOKEN:
case GT_TOKEN:
case LT_TOKEN:
case EQUAL_GT_TOKEN:
case DOUBLE_EQUAL_TOKEN:
case TRIPPLE_EQUAL_TOKEN:
case LT_EQUAL_TOKEN:
case GT_EQUAL_TOKEN:
case NOT_EQUAL_TOKEN:
case NOT_DOUBLE_EQUAL_TOKEN:
case BITWISE_AND_TOKEN:
case BITWISE_XOR_TOKEN:
case PIPE_TOKEN:
case LOGICAL_AND_TOKEN:
case LOGICAL_OR_TOKEN:
return true;
default:
return false;
}
}
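/**
 * Check whether the given context is a parameter context.
 *
 * @param ctx Parser context to check
 * @return <code>true</code> if the given context is a parameter. <code>false</code> otherwise
 */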
private boolean isParameter(ParserRuleContext ctx) {
switch (ctx) {
case REQUIRED_PARAM:
case DEFAULTABLE_PARAM:
case REST_PARAM:
return true;
default:
return false;
}
}
/**
* Get the expected token kind at the given parser rule context. If the parser rule is a terminal,
* then the corresponding terminal token kind is returned. If the parser rule is a production,
* then {@link SyntaxKind#NONE} is returned.
*
* @param ctx Parser rule context
* @return Token kind expected at the given parser rule
*/
private SyntaxKind getExpectedTokenKind(ParserRuleContext ctx) {
switch (ctx) {
case ASSIGN_OP:
return SyntaxKind.EQUAL_TOKEN;
case BINARY_OPERATOR:
return SyntaxKind.PLUS_TOKEN;
case CLOSE_BRACE:
return SyntaxKind.CLOSE_BRACE_TOKEN;
case CLOSE_PARENTHESIS:
return SyntaxKind.CLOSE_PAREN_TOKEN;
case COMMA:
return SyntaxKind.COMMA_TOKEN;
case EXTERNAL_KEYWORD:
return SyntaxKind.EXTERNAL_KEYWORD;
case FUNCTION_KEYWORD:
return SyntaxKind.FUNCTION_KEYWORD;
case FUNC_NAME:
return SyntaxKind.IDENTIFIER_TOKEN;
case OPEN_BRACE:
return SyntaxKind.OPEN_BRACE_TOKEN;
case OPEN_PARENTHESIS:
return SyntaxKind.OPEN_PAREN_TOKEN;
case RETURN_TYPE_DESCRIPTOR:
case RETURNS_KEYWORD:
return SyntaxKind.RETURNS_KEYWORD;
case SEMICOLON:
return SyntaxKind.SEMICOLON_TOKEN;
case VARIABLE_NAME:
case STATEMENT_START_IDENTIFIER:
return SyntaxKind.IDENTIFIER_TOKEN;
case PUBLIC_KEYWORD:
return SyntaxKind.PUBLIC_KEYWORD;
case SIMPLE_TYPE_DESCRIPTOR:
return SyntaxKind.SIMPLE_TYPE;
case ASSIGNMENT_STMT:
return SyntaxKind.IDENTIFIER_TOKEN;
case EXPRESSION_RHS:
return SyntaxKind.PLUS_TOKEN;
case EXPRESSION:
return SyntaxKind.IDENTIFIER_TOKEN;
case EXTERNAL_FUNC_BODY:
return SyntaxKind.EQUAL_TOKEN;
case FUNC_BODY:
case FUNC_BODY_BLOCK:
return SyntaxKind.OPEN_BRACE_TOKEN;
case FUNC_DEFINITION:
return SyntaxKind.FUNCTION_KEYWORD;
case REQUIRED_PARAM:
return SyntaxKind.SIMPLE_TYPE;
case VAR_DECL_STMT:
return SyntaxKind.SIMPLE_TYPE;
case VAR_DECL_STMT_RHS:
return SyntaxKind.SEMICOLON_TOKEN;
case ASSIGNMENT_OR_VAR_DECL_STMT:
return SyntaxKind.SIMPLE_TYPE;
case DEFAULTABLE_PARAM:
return SyntaxKind.SIMPLE_TYPE;
case REST_PARAM:
return SyntaxKind.SIMPLE_TYPE;
case ASTERISK:
return SyntaxKind.ASTERISK_TOKEN;
case CLOSED_RECORD_BODY_END:
return SyntaxKind.CLOSE_BRACE_PIPE_TOKEN;
case CLOSED_RECORD_BODY_START:
return SyntaxKind.OPEN_BRACE_PIPE_TOKEN;
case ELLIPSIS:
return SyntaxKind.ELLIPSIS_TOKEN;
case QUESTION_MARK:
return SyntaxKind.QUESTION_MARK_TOKEN;
case RECORD_BODY_START:
return SyntaxKind.OPEN_BRACE_PIPE_TOKEN;
case RECORD_FIELD:
case RECORD_KEYWORD:
return SyntaxKind.RECORD_KEYWORD;
case TYPE_KEYWORD:
return SyntaxKind.TYPE_KEYWORD;
case TYPE_NAME:
return SyntaxKind.IDENTIFIER_TOKEN;
case TYPE_REFERENCE:
return SyntaxKind.IDENTIFIER_TOKEN;
case RECORD_BODY_END:
return SyntaxKind.CLOSE_BRACE_TOKEN;
case OBJECT_KEYWORD:
return SyntaxKind.OBJECT_KEYWORD;
case PRIVATE_KEYWORD:
return SyntaxKind.PRIVATE_KEYWORD;
case REMOTE_KEYWORD:
return SyntaxKind.REMOTE_KEYWORD;
case OBJECT_FIELD_RHS:
return SyntaxKind.SEMICOLON_TOKEN;
case ABSTRACT_KEYWORD:
return SyntaxKind.ABSTRACT_KEYWORD;
case CLIENT_KEYWORD:
return SyntaxKind.CLIENT_KEYWORD;
case OBJECT_TYPE_FIRST_QUALIFIER:
case OBJECT_TYPE_SECOND_QUALIFIER:
return SyntaxKind.OBJECT_KEYWORD;
case CLOSE_BRACKET:
return SyntaxKind.CLOSE_BRACKET_TOKEN;
case DOT:
return SyntaxKind.DOT_TOKEN;
case FIELD_OR_FUNC_NAME:
return SyntaxKind.IDENTIFIER_TOKEN;
case OPEN_BRACKET:
return SyntaxKind.OPEN_BRACKET_TOKEN;
case IF_KEYWORD:
return SyntaxKind.IF_KEYWORD;
case ELSE_KEYWORD:
return SyntaxKind.ELSE_KEYWORD;
case WHILE_KEYWORD:
return SyntaxKind.WHILE_KEYWORD;
case CHECKING_KEYWORD:
return SyntaxKind.CHECK_KEYWORD;
case AS_KEYWORD:
return SyntaxKind.AS_KEYWORD;
case BOOLEAN_LITERAL:
return SyntaxKind.TRUE_KEYWORD;
case IMPORT_KEYWORD:
return SyntaxKind.IMPORT_KEYWORD;
case IMPORT_MODULE_NAME:
case IMPORT_ORG_OR_MODULE_NAME:
case IMPORT_PREFIX:
case VARIABLE_REF:
case BASIC_LITERAL:
case SERVICE_NAME:
case IDENTIFIER:
case QUALIFIED_IDENTIFIER:
return SyntaxKind.IDENTIFIER_TOKEN;
case VERSION_NUMBER:
case MAJOR_VERSION:
case MINOR_VERSION:
case PATCH_VERSION:
return SyntaxKind.DECIMAL_INTEGER_LITERAL;
case SLASH:
return SyntaxKind.SLASH_TOKEN;
case VERSION_KEYWORD:
return SyntaxKind.VERSION_KEYWORD;
case IMPORT_DECL_RHS:
return SyntaxKind.SEMICOLON_TOKEN;
case IMPORT_SUB_VERSION:
return SyntaxKind.SEMICOLON_TOKEN;
case COLON:
return SyntaxKind.COLON_TOKEN;
case MAPPING_FIELD_NAME:
case MAPPING_FIELD:
return SyntaxKind.IDENTIFIER_TOKEN;
case PANIC_KEYWORD:
return SyntaxKind.PANIC_KEYWORD;
case STRING_LITERAL:
return SyntaxKind.STRING_LITERAL;
case ON_KEYWORD:
return SyntaxKind.ON_KEYWORD;
case RESOURCE_KEYWORD:
return SyntaxKind.RESOURCE_KEYWORD;
case RETURN_KEYWORD:
return SyntaxKind.RETURN_KEYWORD;
case SERVICE_KEYWORD:
return SyntaxKind.SERVICE_KEYWORD;
case BREAK_KEYWORD:
return SyntaxKind.BREAK_KEYWORD;
case LISTENER_KEYWORD:
return SyntaxKind.LISTENER_KEYWORD;
case CONTINUE_KEYWORD:
return SyntaxKind.CONTINUE_KEYWORD;
case CONST_KEYWORD:
return SyntaxKind.CONST_KEYWORD;
case FINAL_KEYWORD:
return SyntaxKind.FINAL_KEYWORD;
case CONST_DECL_TYPE:
return SyntaxKind.IDENTIFIER_TOKEN;
case NIL_TYPE_DESCRIPTOR:
return SyntaxKind.NIL_TYPE;
case TYPEOF_KEYWORD:
return SyntaxKind.TYPEOF_KEYWORD;
case OPTIONAL_TYPE_DESCRIPTOR:
return SyntaxKind.OPTIONAL_TYPE;
case UNARY_OPERATOR:
return SyntaxKind.PLUS_TOKEN;
case AT:
return SyntaxKind.AT_TOKEN;
case FIELD_DESCRIPTOR_RHS:
return SyntaxKind.SEMICOLON_TOKEN;
case AFTER_PARAMETER_TYPE:
return SyntaxKind.IDENTIFIER_TOKEN;
case CONST_DECL_RHS:
return SyntaxKind.EQUAL_TOKEN;
case IS_KEYWORD:
return SyntaxKind.IS_KEYWORD;
case TYPE_DESCRIPTOR:
return SyntaxKind.SIMPLE_TYPE;
case COMP_UNIT:
case TOP_LEVEL_NODE:
case TOP_LEVEL_NODE_WITHOUT_METADATA:
case TOP_LEVEL_NODE_WITHOUT_MODIFIER:
case ANNOTATIONS:
case PARAM_LIST:
case PARAMETER_RHS:
case STATEMENT:
case STATEMENT_WITHOUT_ANNOTS:
case FIELD_OR_REST_DESCIPTOR_RHS:
case MODULE_TYPE_DEFINITION:
case RECORD_TYPE_DESCRIPTOR:
case ARG:
case ARG_LIST:
case EOF:
case FUNC_CALL:
case NAMED_OR_POSITIONAL_ARG_RHS:
case OBJECT_FUNC_OR_FIELD:
case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:
case OBJECT_MEMBER:
case OBJECT_METHOD_START:
case OBJECT_TYPE_DESCRIPTOR:
case OBJECT_TYPE_DESCRIPTOR_START:
case AFTER_IMPORT_MODULE_NAME:
case ASSIGNMENT_OR_VAR_DECL_STMT_RHS:
case BLOCK_STMT:
case CALL_STMT:
case CALL_STMT_START:
case DECIMAL_INTEGER_LITERAL:
case ELSE_BLOCK:
case ELSE_BODY:
case IF_BLOCK:
case IMPORT_DECL:
case IMPORT_PREFIX_DECL:
case MAJOR_MINOR_VERSION_END:
case WHILE_BLOCK:
case ACCESS_EXPRESSION:
case IMPORT_VERSION_DECL:
case MAPPING_CONSTRUCTOR:
case PANIC_STMT:
case SPECIFIC_FIELD_RHS:
case COMPUTED_FIELD_NAME:
case LISTENERS_LIST:
case RESOURCE_DEF:
case RETURN_STMT:
case RETURN_STMT_RHS:
case SERVICE_DECL:
case OPTIONAL_SERVICE_NAME:
case BREAK_STATEMENT:
case CONTINUE_STATEMENT:
case LISTENER_DECL:
case CONSTANT_DECL:
case ANNOT_REFERENCE:
case DOC_STRING:
case OBJECT_MEMBER_WITHOUT_METADATA:
case IS_EXPRESSION:
default:
break;
}
return SyntaxKind.NONE;
}
/**
* Check whether a token kind is a basic literal.
*
* @param kind Token kind to check
* @return <code>true</code> if the given token kind belongs to a basic literal. <code>false</code> otherwise
*/
private boolean isBasicLiteral(SyntaxKind kind) {
switch (kind) {
case DECIMAL_INTEGER_LITERAL:
case HEX_INTEGER_LITERAL:
case STRING_LITERAL:
case TRUE_KEYWORD:
case FALSE_KEYWORD:
return true;
default:
return false;
}
}
/**
* Check whether the given token refers to a unary operator.
*
* @param token Token to check
* @return <code>true</code> if the given token refers to a unary operator. <code>false</code> otherwise
*/
private boolean isUnaryOperator(STToken token) {
switch (token.kind) {
case PLUS_TOKEN:
case MINUS_TOKEN:
case NEGATION_TOKEN:
case EXCLAMATION_MARK_TOKEN:
return true;
default:
return false;
}
}
/**
* Search for matching token sequences within an is-expression and return the most optimal solution.
*
* @param currentCtx Current context
* @param lookahead Position of the next token to consider, relative to the position of the original error
* @param currentDepth Amount of distance traveled so far
* @param currentMatches Matching tokens found so far
* @return Recovery result
*/
private Result seekInIsExpression(ParserRuleContext currentCtx, int lookahead, int currentDepth,
int currentMatches) {
STToken nextToken = this.tokenReader.peek(lookahead);
currentDepth++;
if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
Result fixedPathResult = fixAndContinue(currentCtx, lookahead, currentDepth);
return getFinalResult(currentMatches, fixedPathResult);
}
ParserRuleContext nextContext;
STToken nextNextToken = this.tokenReader.peek(lookahead + 1);
switch (nextNextToken.kind) {
case IS_KEYWORD:
startContext(ParserRuleContext.IS_EXPRESSION);
nextContext = ParserRuleContext.IS_KEYWORD;
break;
default:
nextContext = ParserRuleContext.EXPRESSION_RHS;
break;
}
currentMatches++;
lookahead++;
Result result = seekMatch(nextContext, lookahead, currentDepth);
result.ctx = currentCtx;
return getFinalResult(currentMatches, result);
}
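/**
 * Search within the alternative paths of the given context and find the path that best matches
 * the upcoming token sequence. The lookahead limit is temporarily increased for this search.
 *
 * @param context Parser context that has alternative productions
 * @return The alternative context that best matches the next set of tokens
 */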
public ParserRuleContext findBestPath(ParserRuleContext context) {
int prevLookahead = lookaheadLimit;
lookaheadLimit = (int) (lookaheadLimit * 1.5);
ParserRuleContext[] alternatives;
switch (context) {
case STATEMENT:
alternatives = STATEMENTS;
break;
case TOP_LEVEL_NODE:
alternatives = TOP_LEVEL_NODE;
break;
case OBJECT_MEMBER:
alternatives = OBJECT_MEMBER_START;
break;
default:
throw new IllegalStateException();
}
Result result = seekInAlternativesPaths(1, 0, 0, alternatives);
lookaheadLimit = prevLookahead;
return result.ctx;
}
/**
* Represents a solution/fix for a parser error. A {@link Solution} consists of the parser context where the error
* was encountered, the enclosing parser context at the same point, the token with the error, and the {@link Action}
* required to recover from the error.
*
* @since 1.2.0
*/
public static class Solution {
public ParserRuleContext ctx;
public Action action;
public String tokenText;
public SyntaxKind tokenKind;
public STNode recoveredNode;
public Solution(Action action, ParserRuleContext ctx, SyntaxKind tokenKind, String tokenText) {
this.action = action;
this.ctx = ctx;
this.tokenText = tokenText;
this.tokenKind = tokenKind;
}
@Override
public String toString() {
return action.toString() + "'" + tokenText + "'";
}
}
/**
* Represent a result of a token-sequence-search in a sub-tree. The result will contain the fixes required to
* traverse in that sub-tree, and the number of matching tokens it found, without the fixed tokens.
*/
public static class Result {
private int matches;
private ArrayDeque<Solution> fixes;
/**
* Represent the end solution to be applied to the next immediate token, to recover from the error.
* If the solution is to insert/remove next immediate token, then this is equivalent to the
* <code>fixes.peek()</code>. Else, if the solution is to insert/remove a token that is not the
* immediate next token, then this will have a solution with {@link Action#KEEP} as the action.
*/
private Solution solution;
private ParserRuleContext ctx;
public Result(ArrayDeque<Solution> fixes, int matches, ParserRuleContext ctx) {
this.fixes = fixes;
this.matches = matches;
this.ctx = ctx;
}
}
/**
* Represents the actions that can be taken to recover from a parser error.
*
* @since 1.2.0
*/
enum Action {
INSERT, REMOVE, KEEP;
}
} | class BallerinaParserErrorHandler {
private final AbstractTokenReader tokenReader;
private final BallerinaParserErrorListener errorListener;
private final BallerinaParser parser;
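/** Stack of parser contexts that are currently open. */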
private ArrayDeque<ParserRuleContext> ctxStack = new ArrayDeque<>();
/**
* Two or more rules whose left-hand side of the production is the same (i.e. has alternative paths).
* eg : FUNC_BODIES --> FUNC_BODY_BLOCK
* FUNC_BODIES --> EXTERNAL_FUNC_BODY
*/
private static final ParserRuleContext[] FUNC_BODIES =
{ ParserRuleContext.FUNC_BODY_BLOCK, ParserRuleContext.EXTERNAL_FUNC_BODY };
private static final ParserRuleContext[] STATEMENTS = { ParserRuleContext.CLOSE_BRACE,
ParserRuleContext.ASSIGNMENT_STMT, ParserRuleContext.VAR_DECL_STMT, ParserRuleContext.IF_BLOCK,
ParserRuleContext.WHILE_BLOCK, ParserRuleContext.CALL_STMT, ParserRuleContext.PANIC_STMT,
ParserRuleContext.CONTINUE_STATEMENT, ParserRuleContext.BREAK_STATEMENT, ParserRuleContext.RETURN_STMT,
ParserRuleContext.COMPOUND_ASSIGNMENT_STMT };
private static final ParserRuleContext[] VAR_DECL_RHS =
{ ParserRuleContext.SEMICOLON, ParserRuleContext.ASSIGN_OP };
private static final ParserRuleContext[] PARAMETER_RHS = { ParserRuleContext.COMMA, ParserRuleContext.ASSIGN_OP };
private static final ParserRuleContext[] TOP_LEVEL_NODE =
{ ParserRuleContext.DOC_STRING, ParserRuleContext.ANNOTATIONS, ParserRuleContext.PUBLIC_KEYWORD,
ParserRuleContext.FUNC_DEFINITION, ParserRuleContext.MODULE_TYPE_DEFINITION,
ParserRuleContext.IMPORT_DECL, ParserRuleContext.SERVICE_DECL, ParserRuleContext.LISTENER_DECL,
ParserRuleContext.CONSTANT_DECL, ParserRuleContext.VAR_DECL_STMT, ParserRuleContext.EOF };
private static final ParserRuleContext[] TOP_LEVEL_NODE_WITHOUT_METADATA =
new ParserRuleContext[] { ParserRuleContext.PUBLIC_KEYWORD, ParserRuleContext.FUNC_DEFINITION,
ParserRuleContext.MODULE_TYPE_DEFINITION, ParserRuleContext.IMPORT_DECL,
ParserRuleContext.SERVICE_DECL, ParserRuleContext.LISTENER_DECL, ParserRuleContext.CONSTANT_DECL,
ParserRuleContext.VAR_DECL_STMT, ParserRuleContext.EOF };
private static final ParserRuleContext[] TOP_LEVEL_NODE_WITHOUT_MODIFIER =
{ ParserRuleContext.FUNC_DEFINITION, ParserRuleContext.MODULE_TYPE_DEFINITION,
ParserRuleContext.IMPORT_DECL, ParserRuleContext.SERVICE_DECL, ParserRuleContext.LISTENER_DECL,
ParserRuleContext.CONSTANT_DECL, ParserRuleContext.VAR_DECL_STMT, ParserRuleContext.EOF };
private static final ParserRuleContext[] TYPE_OR_VAR_NAME =
{ ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR, ParserRuleContext.VARIABLE_NAME };
private static final ParserRuleContext[] ASSIGNMENT_OR_VAR_DECL_SECOND_TOKEN =
{ ParserRuleContext.ASSIGN_OP, ParserRuleContext.VARIABLE_NAME };
private static final ParserRuleContext[] FIELD_DESCRIPTOR_RHS =
{ ParserRuleContext.SEMICOLON, ParserRuleContext.QUESTION_MARK, ParserRuleContext.ASSIGN_OP };
private static final ParserRuleContext[] FIELD_OR_REST_DESCIPTOR_RHS =
{ ParserRuleContext.ELLIPSIS, ParserRuleContext.VARIABLE_NAME };
private static final ParserRuleContext[] RECORD_BODY_START =
{ ParserRuleContext.CLOSED_RECORD_BODY_START, ParserRuleContext.OPEN_BRACE };
private static final ParserRuleContext[] RECORD_BODY_END =
{ ParserRuleContext.CLOSED_RECORD_BODY_END, ParserRuleContext.CLOSE_BRACE };
private static final ParserRuleContext[] TYPE_DESCRIPTORS =
{ ParserRuleContext.SIMPLE_TYPE_DESCRIPTOR, ParserRuleContext.RECORD_TYPE_DESCRIPTOR,
ParserRuleContext.OBJECT_TYPE_DESCRIPTOR, ParserRuleContext.NIL_TYPE_DESCRIPTOR };
private static final ParserRuleContext[] RECORD_FIELD =
{ ParserRuleContext.ANNOTATIONS, ParserRuleContext.ASTERISK, ParserRuleContext.TYPE_DESCRIPTOR };
private static final ParserRuleContext[] RECORD_FIELD_WITHOUT_METADATA =
{ ParserRuleContext.ASTERISK, ParserRuleContext.TYPE_DESCRIPTOR };
private static final ParserRuleContext[] ARG_START =
{ ParserRuleContext.VARIABLE_NAME, ParserRuleContext.ELLIPSIS, ParserRuleContext.EXPRESSION };
private static final ParserRuleContext[] NAMED_OR_POSITIONAL_ARG_RHS =
{ ParserRuleContext.COMMA, ParserRuleContext.ASSIGN_OP };
private static final ParserRuleContext[] PARAM_LIST =
{ ParserRuleContext.CLOSE_PARENTHESIS, ParserRuleContext.REQUIRED_PARAM };
private static final ParserRuleContext[] OBJECT_FIELD_RHS =
{ ParserRuleContext.SEMICOLON, ParserRuleContext.ASSIGN_OP };
private static final ParserRuleContext[] OBJECT_MEMBER_START =
{ ParserRuleContext.DOC_STRING, ParserRuleContext.ANNOTATIONS, ParserRuleContext.ASTERISK,
ParserRuleContext.OBJECT_FUNC_OR_FIELD, ParserRuleContext.CLOSE_BRACE };
private static final ParserRuleContext[] OBJECT_MEMBER_WITHOUT_METADATA =
{ ParserRuleContext.ASTERISK, ParserRuleContext.OBJECT_FUNC_OR_FIELD, ParserRuleContext.CLOSE_BRACE };
private static final ParserRuleContext[] OBJECT_FUNC_OR_FIELD = { ParserRuleContext.PUBLIC_KEYWORD,
ParserRuleContext.PRIVATE_KEYWORD, ParserRuleContext.OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY };
private static final ParserRuleContext[] OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY =
{ ParserRuleContext.TYPE_DESCRIPTOR, ParserRuleContext.OBJECT_METHOD_START };
private static final ParserRuleContext[] OBJECT_METHOD_START =
{ ParserRuleContext.REMOTE_KEYWORD, ParserRuleContext.FUNCTION_KEYWORD };
private static final ParserRuleContext[] OBJECT_TYPE_DESCRIPTOR_START =
{ ParserRuleContext.OBJECT_TYPE_FIRST_QUALIFIER, ParserRuleContext.OBJECT_KEYWORD };
private static final ParserRuleContext[] ELSE_BODY = { ParserRuleContext.IF_BLOCK, ParserRuleContext.OPEN_BRACE };
private static final ParserRuleContext[] ELSE_BLOCK =
{ ParserRuleContext.ELSE_KEYWORD, ParserRuleContext.STATEMENT };
private static final ParserRuleContext[] CALL_STATEMENT =
{ ParserRuleContext.CHECKING_KEYWORD, ParserRuleContext.VARIABLE_NAME };
private static final ParserRuleContext[] IMPORT_PREFIX_DECL =
{ ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };
private static final ParserRuleContext[] IMPORT_VERSION =
{ ParserRuleContext.VERSION_KEYWORD, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };
private static final ParserRuleContext[] IMPORT_DECL_RHS = { ParserRuleContext.SLASH, ParserRuleContext.DOT,
ParserRuleContext.VERSION_KEYWORD, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };
private static final ParserRuleContext[] AFTER_IMPORT_MODULE_NAME = { ParserRuleContext.DOT,
ParserRuleContext.VERSION_KEYWORD, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };
private static final ParserRuleContext[] MAJOR_MINOR_VERSION_END =
{ ParserRuleContext.DOT, ParserRuleContext.AS_KEYWORD, ParserRuleContext.SEMICOLON };
private static final ParserRuleContext[] RETURN_RHS = { ParserRuleContext.SEMICOLON, ParserRuleContext.EXPRESSION };
private static final ParserRuleContext[] EXPRESSIONS = { ParserRuleContext.BASIC_LITERAL,
ParserRuleContext.VARIABLE_REF, ParserRuleContext.ACCESS_EXPRESSION, ParserRuleContext.TYPEOF_EXPRESSION,
ParserRuleContext.UNARY_EXPRESSION, ParserRuleContext.IS_EXPRESSION };
private static final ParserRuleContext[] MAPPING_FIELD_START = { ParserRuleContext.MAPPING_FIELD_NAME,
ParserRuleContext.STRING_LITERAL, ParserRuleContext.COMPUTED_FIELD_NAME, ParserRuleContext.ELLIPSIS };
private static final ParserRuleContext[] SPECIFIC_FIELD_RHS =
{ ParserRuleContext.COLON, ParserRuleContext.COMMA, ParserRuleContext.CLOSE_PARENTHESIS };
private static final ParserRuleContext[] OPTIONAL_SERVICE_NAME =
{ ParserRuleContext.SERVICE_NAME, ParserRuleContext.ON_KEYWORD };
private static final ParserRuleContext[] RESOURCE_DEF_START =
{ ParserRuleContext.RESOURCE_KEYWORD, ParserRuleContext.FUNC_DEFINITION, ParserRuleContext.CLOSE_BRACE };
private static final ParserRuleContext[] CONST_DECL_RHS =
{ ParserRuleContext.STATEMENT_START_IDENTIFIER, ParserRuleContext.ASSIGN_OP };
private static final ParserRuleContext[] PARAMETER =
{ ParserRuleContext.ANNOTATIONS, ParserRuleContext.PUBLIC_KEYWORD, ParserRuleContext.TYPE_DESCRIPTOR };
private static final ParserRuleContext[] PARAMETER_WITHOUT_ANNOTS =
{ ParserRuleContext.PUBLIC_KEYWORD, ParserRuleContext.TYPE_DESCRIPTOR };
/**
* Limit for the distance to travel, to determine a successful lookahead.
*/
private int lookaheadLimit = 5;
public BallerinaParserErrorHandler(AbstractTokenReader tokenReader, BallerinaParser parser) {
this.tokenReader = tokenReader;
this.parser = parser;
this.errorListener = new BallerinaParserErrorListener();
}
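/**
 * Start a new parser context, by pushing it onto the context stack.
 *
 * @param context Context to start
 */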
public void startContext(ParserRuleContext context) {
this.ctxStack.push(context);
}
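/**
 * End the current parser context, by popping it from the context stack.
 */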
public void endContext() {
this.ctxStack.pop();
}
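/**
 * Replace the parser context on top of the context stack with the given context.
 *
 * @param context Context to switch to
 */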
public void switchContext(ParserRuleContext context) {
this.ctxStack.pop();
this.ctxStack.push(context);
}
public void reportInvalidNode(STToken startingToken, String message) {
this.errorListener.reportInvalidNodeError(startingToken, message);
}
public void reportMissingTokenError(String message) {
STToken currentToken = this.tokenReader.head();
this.errorListener.reportMissingTokenError(currentToken, message);
}
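/**
 * Get the parser context that is currently on top of the context stack.
 *
 * @return Current parent context
 */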
private ParserRuleContext getParentContext() {
return this.ctxStack.peek();
}
/*
* -------------- Error recovering --------------
*/
/**
* Recover from the current context. Returns the action that needs to be taken with respect
* to the next token, in order to recover. This method will search for the most
* optimal action, i.e. the one that lets the parser proceed the farthest distance.
*
* @param nextToken Next token of the input where the error occurred
* @param currentCtx Current parser context
* @param args Arguments that are required to continue parsing from the given parser context
* @return The action to be taken for the next token, in order to recover
*/
public Solution recover(ParserRuleContext currentCtx, STToken nextToken, Object... args) {
if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
SyntaxKind expectedTokenKind = getExpectedTokenKind(currentCtx);
Solution fix = new Solution(Action.INSERT, currentCtx, expectedTokenKind, currentCtx.toString());
applyFix(currentCtx, fix, args);
return fix;
}
Result bestMatch = seekMatch(currentCtx);
if (bestMatch.matches > 0) {
Solution sol = bestMatch.solution;
applyFix(currentCtx, sol, args);
return sol;
} else {
removeInvalidToken();
Solution sol = new Solution(Action.REMOVE, currentCtx, nextToken.kind, nextToken.toString());
sol.recoveredNode = this.parser.resumeParsing(currentCtx, args);
return sol;
}
}
/**
* Remove the invalid token. This method assumes that the next immediate token
* of the token input stream is the culprit.
*/
public void removeInvalidToken() {
STToken invalidToken = this.tokenReader.read();
this.errorListener.reportInvalidToken(invalidToken);
}
/**
* Apply the fix to the current context.
*
* @param currentCtx Current context
* @param fix Fix to apply
* @param args Arguments that are required to continue parsing from the given parser context
*/
private void applyFix(ParserRuleContext currentCtx, Solution fix, Object... args) {
if (fix.action == Action.REMOVE) {
removeInvalidToken();
fix.recoveredNode = this.parser.resumeParsing(currentCtx, args);
} else {
fix.recoveredNode = handleMissingToken(currentCtx, fix);
}
}
/**
* Handle a missing token scenario.
*
* @param currentCtx Current context
* @param fix Solution to recover from the missing token
*/
private STNode handleMissingToken(ParserRuleContext currentCtx, Solution fix) {
if (!isProductionWithAlternatives(currentCtx)) {
reportMissingTokenError("missing " + fix.ctx);
}
return STNodeFactory.createMissingToken(fix.tokenKind);
}
/**
* Get a snapshot of the current context stack.
*
* @return Snapshot of the current context stack
*/
private ArrayDeque<ParserRuleContext> getCtxStackSnapshot() {
return this.ctxStack.clone();
}
private boolean isProductionWithAlternatives(ParserRuleContext currentCtx) {
switch (currentCtx) {
case TOP_LEVEL_NODE:
case TOP_LEVEL_NODE_WITHOUT_MODIFIER:
case TOP_LEVEL_NODE_WITHOUT_METADATA:
case STATEMENT:
case STATEMENT_WITHOUT_ANNOTS:
case FUNC_BODY:
case VAR_DECL_STMT_RHS:
case EXPRESSION_RHS:
case PARAMETER_RHS:
case ASSIGNMENT_OR_VAR_DECL_STMT:
case AFTER_PARAMETER_TYPE:
case FIELD_DESCRIPTOR_RHS:
case RECORD_BODY_START:
case RECORD_BODY_END:
case TYPE_DESCRIPTOR:
case NAMED_OR_POSITIONAL_ARG_RHS:
case OBJECT_FIELD_RHS:
case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:
case OBJECT_MEMBER:
case OBJECT_TYPE_FIRST_QUALIFIER:
case OBJECT_TYPE_SECOND_QUALIFIER:
case ELSE_BODY:
case IMPORT_DECL_RHS:
case IMPORT_SUB_VERSION:
case VERSION_NUMBER:
case IMPORT_VERSION_DECL:
case IMPORT_PREFIX_DECL:
case MAPPING_FIELD:
case SPECIFIC_FIELD_RHS:
case RESOURCE_DEF:
case PARAMETER_WITHOUT_ANNOTS:
case PARAMETER:
return true;
default:
return false;
}
}
/*
* seekMatch methods
*/
/**
* Start a fresh search for a way to recover, using the next immediate token (peek(1)) and the current context.
*
* @param currentCtx Current parser context
* @return Recovery result
*/
private Result seekMatch(ParserRuleContext currentCtx) {
return seekMatchInSubTree(currentCtx, 1, 0);
}
/**
* Search for a solution in a sub-tree/sub-path. This will take a snapshot of the current context stack
* and will operate on top of it, so that the original state of the parser will not be disturbed. On return
* the previous state of the parser contexts will be restored.
*
* @param currentCtx Current context
* @param lookahead Position of the next token to consider, from the position of the original error.
* @param currentDepth Amount of distance traveled so far.
* @return Recovery result
*/
private Result seekMatchInSubTree(ParserRuleContext currentCtx, int lookahead, int currentDepth) {
ArrayDeque<ParserRuleContext> tempCtxStack = this.ctxStack;
this.ctxStack = getCtxStackSnapshot();
Result result = seekMatch(currentCtx, lookahead, currentDepth);
result.ctx = currentCtx;
this.ctxStack = tempCtxStack;
return result;
}
/**
* Check whether the given token marks the end of the current block.
* TODO: This is a duplicate method. Same as the corresponding check in {@link BallerinaParser}.
*
* @param token Token to check
* @return <code>true</code> if the token marks the end of the block. <code>false</code> otherwise
*/
private boolean isEndOfBlock(STToken token) {
ParserRuleContext enclosingContext = getParentContext();
switch (enclosingContext) {
case OBJECT_TYPE_DESCRIPTOR:
case SERVICE_DECL:
switch (token.kind) {
case CLOSE_BRACE_TOKEN:
case EOF_TOKEN:
case CLOSE_BRACE_PIPE_TOKEN:
case TYPE_KEYWORD:
return true;
default:
return false;
}
case BLOCK_STMT:
switch (token.kind) {
case CLOSE_BRACE_TOKEN:
case EOF_TOKEN:
case CLOSE_BRACE_PIPE_TOKEN:
case TYPE_KEYWORD:
case PUBLIC_KEYWORD:
case FUNCTION_KEYWORD:
case ELSE_KEYWORD:
return true;
default:
return false;
}
default:
switch (token.kind) {
case CLOSE_BRACE_TOKEN:
case EOF_TOKEN:
case CLOSE_BRACE_PIPE_TOKEN:
case TYPE_KEYWORD:
case PUBLIC_KEYWORD:
case FUNCTION_KEYWORD:
case SERVICE_KEYWORD:
case RESOURCE_KEYWORD:
return true;
default:
return false;
}
}
}
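/**
 * Check whether the upcoming tokens mark the end of an object type descriptor body.
 *
 * @param nextLookahead Position of the next token to consider, relative to the position of the original error
 * @return <code>true</code> if this is the end of the object type body. <code>false</code> otherwise
 */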
private boolean isEndOfObjectTypeNode(int nextLookahead) {
STToken nextToken = this.tokenReader.peek(nextLookahead);
switch (nextToken.kind) {
case CLOSE_BRACE_TOKEN:
case EOF_TOKEN:
case CLOSE_BRACE_PIPE_TOKEN:
case TYPE_KEYWORD:
case SERVICE_KEYWORD:
return true;
default:
STToken nextNextToken = this.tokenReader.peek(nextLookahead + 1);
switch (nextNextToken.kind) {
case CLOSE_BRACE_TOKEN:
case EOF_TOKEN:
case CLOSE_BRACE_PIPE_TOKEN:
case TYPE_KEYWORD:
case SERVICE_KEYWORD:
return true;
default:
return false;
}
}
}
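/**
 * Check whether the given token marks the end of a parameter list.
 *
 * @param token Token to check
 * @return <code>true</code> if the token marks the end of the parameter list. <code>false</code> otherwise
 */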
private boolean isEndOfParametersList(STToken token) {
switch (token.kind) {
case OPEN_BRACE_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case CLOSE_BRACKET_TOKEN:
case SEMICOLON_TOKEN:
case PUBLIC_KEYWORD:
case FUNCTION_KEYWORD:
case EOF_TOKEN:
case RETURNS_KEYWORD:
return true;
default:
return false;
}
}
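/**
 * Check whether the given token marks the end of a single parameter.
 *
 * @param token Token to check
 * @return <code>true</code> if the token marks the end of the parameter. <code>false</code> otherwise
 */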
private boolean isEndOfParameter(STToken token) {
switch (token.kind) {
case OPEN_BRACE_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case CLOSE_BRACKET_TOKEN:
case SEMICOLON_TOKEN:
case COMMA_TOKEN:
case PUBLIC_KEYWORD:
case FUNCTION_KEYWORD:
case EOF_TOKEN:
case RETURNS_KEYWORD:
return true;
default:
return false;
}
}
/**
* Search for a solution.
* Terminals are matched directly, while non-terminals that have alternative productions are searched via seekInAlternativesPaths().
*
* @param currentCtx Current context
* @param lookahead Position of the next token to consider, relative to the position of the original error.
* @param currentDepth Amount of distance traveled so far.
* @return Recovery result
*/
private Result seekMatch(ParserRuleContext currentCtx, int lookahead, int currentDepth) {
boolean hasMatch;
boolean skipRule;
int matchingRulesCount = 0;
boolean isEntryPoint = true;
while (currentDepth < lookaheadLimit) {
hasMatch = true;
skipRule = false;
STToken nextToken = this.tokenReader.peek(lookahead);
switch (currentCtx) {
case EOF:
hasMatch = nextToken.kind == SyntaxKind.EOF_TOKEN;
break;
case PUBLIC_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.PUBLIC_KEYWORD;
break;
case PRIVATE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.PRIVATE_KEYWORD;
break;
case REMOTE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.REMOTE_KEYWORD;
break;
case TOP_LEVEL_NODE:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, TOP_LEVEL_NODE);
case TOP_LEVEL_NODE_WITHOUT_MODIFIER:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
TOP_LEVEL_NODE_WITHOUT_MODIFIER);
case TOP_LEVEL_NODE_WITHOUT_METADATA:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
TOP_LEVEL_NODE_WITHOUT_METADATA);
case FUNCTION_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.FUNCTION_KEYWORD;
break;
case FUNC_NAME:
case VARIABLE_NAME:
case TYPE_NAME:
case FIELD_OR_FUNC_NAME:
case IMPORT_ORG_OR_MODULE_NAME:
case IMPORT_MODULE_NAME:
case IMPORT_PREFIX:
case MAPPING_FIELD_NAME:
case SERVICE_NAME:
case QUALIFIED_IDENTIFIER:
case IDENTIFIER:
hasMatch = nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN;
break;
case OPEN_PARENTHESIS:
hasMatch = nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN;
break;
case CLOSE_PARENTHESIS:
hasMatch = nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN;
break;
case RETURNS_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.RETURNS_KEYWORD;
if (!hasMatch) {
skipRule = true;
}
break;
case SIMPLE_TYPE_DESCRIPTOR:
case CONST_DECL_TYPE:
hasMatch =
nextToken.kind == SyntaxKind.SIMPLE_TYPE || nextToken.kind == SyntaxKind.SERVICE_KEYWORD ||
nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN;
break;
case FUNC_BODY:
return seekInFuncBodies(lookahead, currentDepth, matchingRulesCount);
case OPEN_BRACE:
hasMatch = nextToken.kind == SyntaxKind.OPEN_BRACE_TOKEN;
break;
case CLOSE_BRACE:
hasMatch = nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN;
break;
case ASSIGN_OP:
hasMatch = nextToken.kind == SyntaxKind.EQUAL_TOKEN;
break;
case EXTERNAL_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.EXTERNAL_KEYWORD;
break;
case SEMICOLON:
hasMatch = nextToken.kind == SyntaxKind.SEMICOLON_TOKEN;
break;
case STATEMENT:
case STATEMENT_WITHOUT_ANNOTS:
if (isEndOfBlock(nextToken)) {
skipRule = true;
break;
}
return seekInStatements(currentCtx, nextToken, lookahead, currentDepth, matchingRulesCount);
case BINARY_OPERATOR:
hasMatch = isBinaryOperator(nextToken);
break;
case EXPRESSION:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, EXPRESSIONS);
case VAR_DECL_STMT_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, VAR_DECL_RHS);
case EXPRESSION_RHS:
return seekMatchInExpressionRhs(nextToken, lookahead, currentDepth, matchingRulesCount);
case COMMA:
hasMatch = nextToken.kind == SyntaxKind.COMMA_TOKEN;
break;
case PARAM_LIST:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, PARAM_LIST);
case PARAMETER_RHS:
ParserRuleContext parentCtx = getParentContext();
switch (parentCtx) {
case REQUIRED_PARAM:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, PARAMETER_RHS);
case DEFAULTABLE_PARAM:
case REST_PARAM:
skipRule = true;
break;
default:
throw new IllegalStateException();
}
break;
case STATEMENT_START_IDENTIFIER:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, TYPE_OR_VAR_NAME);
case ASSIGNMENT_OR_VAR_DECL_STMT_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
ASSIGNMENT_OR_VAR_DECL_SECOND_TOKEN);
case CLOSED_RECORD_BODY_END:
hasMatch = nextToken.kind == SyntaxKind.CLOSE_BRACE_PIPE_TOKEN;
break;
case CLOSED_RECORD_BODY_START:
hasMatch = nextToken.kind == SyntaxKind.OPEN_BRACE_PIPE_TOKEN;
break;
case ELLIPSIS:
hasMatch = nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN;
break;
case QUESTION_MARK:
hasMatch = nextToken.kind == SyntaxKind.QUESTION_MARK_TOKEN;
break;
case RECORD_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.RECORD_KEYWORD;
break;
case TYPE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.TYPE_KEYWORD;
break;
case FIELD_DESCRIPTOR_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, FIELD_DESCRIPTOR_RHS);
case FIELD_OR_REST_DESCIPTOR_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
FIELD_OR_REST_DESCIPTOR_RHS);
case RECORD_BODY_END:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, RECORD_BODY_END);
case RECORD_BODY_START:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, RECORD_BODY_START);
case TYPE_DESCRIPTOR:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, TYPE_DESCRIPTORS);
case RECORD_FIELD:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, RECORD_FIELD);
case RECORD_FIELD_WITHOUT_METADATA:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
RECORD_FIELD_WITHOUT_METADATA);
case ARG:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, ARG_START);
case NAMED_OR_POSITIONAL_ARG_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
NAMED_OR_POSITIONAL_ARG_RHS);
case OBJECT_MEMBER:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, OBJECT_MEMBER_START);
case OBJECT_MEMBER_WITHOUT_METADATA:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
OBJECT_MEMBER_WITHOUT_METADATA);
case OBJECT_FIELD_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, OBJECT_FIELD_RHS);
case OBJECT_METHOD_START:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, OBJECT_METHOD_START);
case OBJECT_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.OBJECT_KEYWORD;
break;
case OBJECT_FUNC_OR_FIELD:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, OBJECT_FUNC_OR_FIELD);
case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY);
case OBJECT_TYPE_DESCRIPTOR_START:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
OBJECT_TYPE_DESCRIPTOR_START);
case OBJECT_TYPE_FIRST_QUALIFIER:
case OBJECT_TYPE_SECOND_QUALIFIER:
if (currentDepth == 0) {
hasMatch = false;
break;
}
hasMatch = nextToken.kind == SyntaxKind.ABSTRACT_KEYWORD ||
nextToken.kind == SyntaxKind.CLIENT_KEYWORD;
break;
case ABSTRACT_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.ABSTRACT_KEYWORD;
break;
case CLIENT_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.CLIENT_KEYWORD;
break;
case OPEN_BRACKET:
hasMatch = nextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN;
break;
case CLOSE_BRACKET:
hasMatch = nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN;
break;
case DOT:
hasMatch = nextToken.kind == SyntaxKind.DOT_TOKEN;
break;
case IF_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.IF_KEYWORD;
break;
case ELSE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.ELSE_KEYWORD;
break;
case ELSE_BLOCK:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, ELSE_BLOCK);
case ELSE_BODY:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, ELSE_BODY);
case WHILE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.WHILE_KEYWORD;
break;
case CHECKING_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.CHECK_KEYWORD ||
nextToken.kind == SyntaxKind.CHECKPANIC_KEYWORD;
break;
case CALL_STMT_START:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, CALL_STATEMENT);
case PANIC_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.PANIC_KEYWORD;
break;
case AS_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.AS_KEYWORD;
break;
case BOOLEAN_LITERAL:
hasMatch = nextToken.kind == SyntaxKind.TRUE_KEYWORD || nextToken.kind == SyntaxKind.FALSE_KEYWORD;
break;
case DECIMAL_INTEGER_LITERAL:
case MAJOR_VERSION:
case MINOR_VERSION:
case PATCH_VERSION:
hasMatch = nextToken.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL;
break;
case IMPORT_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.IMPORT_KEYWORD;
break;
case SLASH:
hasMatch = nextToken.kind == SyntaxKind.SLASH_TOKEN;
break;
case VERSION_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.VERSION_KEYWORD;
break;
case CONTINUE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.CONTINUE_KEYWORD;
break;
case BREAK_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.BREAK_KEYWORD;
break;
case IMPORT_PREFIX_DECL:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, IMPORT_PREFIX_DECL);
case IMPORT_VERSION_DECL:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, IMPORT_VERSION);
case IMPORT_DECL_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, IMPORT_DECL_RHS);
case AFTER_IMPORT_MODULE_NAME:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
AFTER_IMPORT_MODULE_NAME);
case MAJOR_MINOR_VERSION_END:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
MAJOR_MINOR_VERSION_END);
case RETURN_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.RETURN_KEYWORD;
break;
case RETURN_STMT_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, RETURN_RHS);
case ACCESS_EXPRESSION:
return seekInAccessExpression(currentCtx, lookahead, currentDepth, matchingRulesCount);
case BASIC_LITERAL:
hasMatch = isBasicLiteral(nextToken.kind);
break;
case COLON:
hasMatch = nextToken.kind == SyntaxKind.COLON_TOKEN;
break;
case STRING_LITERAL:
hasMatch = nextToken.kind == SyntaxKind.STRING_LITERAL;
break;
case MAPPING_FIELD:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, MAPPING_FIELD_START);
case SPECIFIC_FIELD_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, SPECIFIC_FIELD_RHS);
case SERVICE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.SERVICE_KEYWORD;
break;
case ON_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.ON_KEYWORD;
break;
case OPTIONAL_SERVICE_NAME:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, OPTIONAL_SERVICE_NAME);
case RESOURCE_DEF:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, RESOURCE_DEF_START);
case RESOURCE_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.RESOURCE_KEYWORD;
break;
case LISTENER_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.LISTENER_KEYWORD;
break;
case CONST_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.CONST_KEYWORD;
break;
case FINAL_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.FINAL_KEYWORD;
break;
case CONST_DECL_RHS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, CONST_DECL_RHS);
case TYPEOF_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.TYPEOF_KEYWORD;
break;
case UNARY_OPERATOR:
hasMatch = isUnaryOperator(nextToken);
break;
case AT:
hasMatch = nextToken.kind == SyntaxKind.AT_TOKEN;
break;
case PARAMETER:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount, PARAMETER);
case PARAMETER_WITHOUT_ANNOTS:
return seekInAlternativesPaths(lookahead, currentDepth, matchingRulesCount,
PARAMETER_WITHOUT_ANNOTS);
case IS_KEYWORD:
hasMatch = nextToken.kind == SyntaxKind.IS_KEYWORD;
break;
case IS_EXPRESSION:
return seekInIsExpression(currentCtx, lookahead, currentDepth, matchingRulesCount);
case COMP_UNIT:
case FUNC_DEFINITION:
case RETURN_TYPE_DESCRIPTOR:
case EXTERNAL_FUNC_BODY:
case FUNC_BODY_BLOCK:
case ASSIGNMENT_STMT:
case VAR_DECL_STMT:
case REQUIRED_PARAM:
case AFTER_PARAMETER_TYPE:
case DEFAULTABLE_PARAM:
case REST_PARAM:
case MODULE_TYPE_DEFINITION:
case ARG_LIST:
case ASTERISK:
case FUNC_CALL:
case RECORD_TYPE_DESCRIPTOR:
case OBJECT_TYPE_DESCRIPTOR:
case ASSIGNMENT_OR_VAR_DECL_STMT:
case CALL_STMT:
case IF_BLOCK:
case BLOCK_STMT:
case WHILE_BLOCK:
case VERSION_NUMBER:
case IMPORT_DECL:
case IMPORT_SUB_VERSION:
case MAPPING_CONSTRUCTOR:
case PANIC_STMT:
case COMPUTED_FIELD_NAME:
case RETURN_STMT:
case LISTENERS_LIST:
case SERVICE_DECL:
case BREAK_STATEMENT:
case CONTINUE_STATEMENT:
case LISTENER_DECL:
case CONSTANT_DECL:
case NIL_TYPE_DESCRIPTOR:
case OPTIONAL_TYPE_DESCRIPTOR:
case ANNOTATIONS:
case DOC_STRING:
case VARIABLE_REF:
case TYPE_REFERENCE:
case ANNOT_REFERENCE:
default:
skipRule = true;
hasMatch = true;
break;
}
if (!hasMatch) {
Result fixedPathResult = fixAndContinue(currentCtx, lookahead, currentDepth + 1);
if (isEntryPoint) {
fixedPathResult.solution = fixedPathResult.fixes.peek();
} else {
fixedPathResult.solution = new Solution(Action.KEEP, currentCtx, getExpectedTokenKind(currentCtx),
currentCtx.toString());
}
return getFinalResult(matchingRulesCount, fixedPathResult);
}
currentCtx = getNextRule(currentCtx, lookahead + 1);
if (!skipRule) {
currentDepth++;
matchingRulesCount++;
lookahead++;
isEntryPoint = false;
}
}
Result result = new Result(new ArrayDeque<>(), matchingRulesCount, currentCtx);
result.solution =
new Solution(Action.KEEP, currentCtx, getExpectedTokenKind(currentCtx), currentCtx.toString());
return result;
}
/**
* Search for matching token sequences within the function body signatures and returns the most optimal solution.
* This will check whether the token stream best matches a 'function-body-block' or an 'external-function-body'.
*
* @param lookahead Position of the next token to consider, relative to the position of the original error
* @param currentDepth Amount of distance traveled so far
* @param currentMatches Matching tokens found so far
* @return Recovery result
*/
private Result seekInFuncBodies(int lookahead, int currentDepth, int currentMatches) {
return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, FUNC_BODIES);
}
/**
* Search for matching token sequences within different kinds of statements and returns the most optimal solution.
*
* @param currentCtx Current context
* @param nextToken Next token in the token stream
* @param lookahead Position of the next token to consider, relative to the position of the original error
* @param currentDepth Amount of distance traveled so far
* @param currentMatches Matching tokens found so far
* @return Recovery result
*/
private Result seekInStatements(ParserRuleContext currentCtx, STToken nextToken, int lookahead, int currentDepth,
int currentMatches) {
if (nextToken.kind == SyntaxKind.SEMICOLON_TOKEN) {
Result result = seekMatchInSubTree(ParserRuleContext.STATEMENT, lookahead + 1, currentDepth);
result.fixes.push(new Solution(Action.REMOVE, currentCtx, nextToken.kind, nextToken.toString()));
return getFinalResult(currentMatches, result);
}
return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, STATEMENTS);
}
/**
* Search for matching token sequences within access expressions and returns the most optimal solution.
* Access expression can be one of: method-call, field-access, member-access.
*
* @param currentCtx Current context
* @param lookahead Position of the next token to consider, relative to the position of the original error
* @param currentDepth Amount of distance traveled so far
* @param currentMatches Matching tokens found so far
* @return Recovery result
*/
private Result seekInAccessExpression(ParserRuleContext currentCtx, int lookahead, int currentDepth,
int currentMatches) {
STToken nextToken = this.tokenReader.peek(lookahead);
currentDepth++;
if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
Result fixedPathResult = fixAndContinue(currentCtx, lookahead, currentDepth);
return getFinalResult(currentMatches, fixedPathResult);
}
ParserRuleContext nextContext;
STToken nextNextToken = this.tokenReader.peek(lookahead + 1);
switch (nextNextToken.kind) {
case OPEN_PAREN_TOKEN:
nextContext = ParserRuleContext.OPEN_PARENTHESIS;
break;
case DOT_TOKEN:
nextContext = ParserRuleContext.DOT;
break;
case OPEN_BRACKET_TOKEN:
nextContext = ParserRuleContext.OPEN_BRACKET;
break;
default:
nextContext = ParserRuleContext.EXPRESSION_RHS;
break;
}
currentMatches++;
lookahead++;
Result result = seekMatch(nextContext, lookahead, currentDepth);
result.ctx = currentCtx;
return getFinalResult(currentMatches, result);
}
/**
* Search for a match in rhs of an expression. RHS of an expression can be the end
* of the expression or the rhs of a binary expression.
*
* @param nextToken Next token in the token stream
* @param lookahead Position of the next token to consider, relative to the position of the original error
* @param currentDepth Amount of distance traveled so far
* @param currentMatches Matching tokens found so far
* @return Recovery result
*/
private Result seekMatchInExpressionRhs(STToken nextToken, int lookahead, int currentDepth, int currentMatches) {
ParserRuleContext parentCtx = getParentContext();
if (isParameter(parentCtx) || parentCtx == ParserRuleContext.ARG) {
ParserRuleContext[] next = { ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT,
ParserRuleContext.OPEN_BRACKET, ParserRuleContext.COMMA, ParserRuleContext.CLOSE_PARENTHESIS };
return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next);
}
if (parentCtx == ParserRuleContext.MAPPING_CONSTRUCTOR) {
ParserRuleContext[] next = { ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT,
ParserRuleContext.OPEN_BRACKET, ParserRuleContext.COMMA, ParserRuleContext.CLOSE_BRACE };
return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next);
}
if (parentCtx == ParserRuleContext.COMPUTED_FIELD_NAME) {
ParserRuleContext[] next = { ParserRuleContext.CLOSE_BRACKET, ParserRuleContext.BINARY_OPERATOR,
ParserRuleContext.DOT, ParserRuleContext.OPEN_BRACKET };
return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next);
}
if (parentCtx == ParserRuleContext.LISTENERS_LIST) {
ParserRuleContext[] next = { ParserRuleContext.COMMA, ParserRuleContext.BINARY_OPERATOR,
ParserRuleContext.DOT, ParserRuleContext.OPEN_BRACKET, ParserRuleContext.OPEN_BRACE };
return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, next);
}
ParserRuleContext nextContext;
if (parentCtx == ParserRuleContext.IF_BLOCK || parentCtx == ParserRuleContext.WHILE_BLOCK) {
nextContext = ParserRuleContext.BLOCK_STMT;
} else if (isStatement(parentCtx) || parentCtx == ParserRuleContext.RECORD_FIELD ||
parentCtx == ParserRuleContext.OBJECT_MEMBER || parentCtx == ParserRuleContext.LISTENER_DECL ||
parentCtx == ParserRuleContext.CONSTANT_DECL) {
nextContext = ParserRuleContext.SEMICOLON;
} else if (parentCtx == ParserRuleContext.ANNOTATIONS) {
nextContext = ParserRuleContext.TOP_LEVEL_NODE;
} else {
throw new IllegalStateException();
}
ParserRuleContext[] alternatives = { ParserRuleContext.BINARY_OPERATOR, ParserRuleContext.DOT,
ParserRuleContext.OPEN_BRACKET, ParserRuleContext.OPEN_PARENTHESIS, ParserRuleContext.IS_KEYWORD,
nextContext };
return seekInAlternativesPaths(lookahead, currentDepth, currentMatches, alternatives);
}
/**
* Search for matching token sequences within the given alternative paths, and find the most optimal solution.
*
* @param lookahead Position of the next token to consider, relative to the position of the original error
* @param currentDepth Amount of distance traveled so far
* @param currentMatches Matching tokens found so far
* @param alternativeRules Alternative rules to search within
* @return Recovery result
*/
private Result seekInAlternativesPaths(int lookahead, int currentDepth, int currentMatches,
ParserRuleContext[] alternativeRules) {
@SuppressWarnings("unchecked")
List<Result>[] results = new List[lookaheadLimit];
int bestMatchIndex = 0;
for (ParserRuleContext rule : alternativeRules) {
Result result = seekMatchInSubTree(rule, lookahead, currentDepth);
List<Result> similarResults = results[result.matches];
if (similarResults == null) {
similarResults = new ArrayList<>(lookaheadLimit);
results[result.matches] = similarResults;
if (bestMatchIndex < result.matches) {
bestMatchIndex = result.matches;
}
}
similarResults.add(result);
}
if (bestMatchIndex == 0) {
return new Result(new ArrayDeque<>(), currentMatches, alternativeRules[0]);
}
List<Result> bestMatches = results[bestMatchIndex];
Result bestMatch = bestMatches.get(0);
Result currentMatch;
for (int i = 1; i < bestMatches.size(); i++) {
currentMatch = bestMatches.get(i);
int currentMatchFixesSize = currentMatch.fixes.size();
int bestMatchFixesSize = bestMatch.fixes.size();
if (currentMatchFixesSize == bestMatchFixesSize) {
if (bestMatchFixesSize == 0) {
continue;
}
// When the fix counts are equal, prefer the match whose next fix is an insertion over one that removes a token.
Solution bestSol = bestMatch.fixes.peek();
Solution currentSol = currentMatch.fixes.peek();
if (bestSol.action == Action.REMOVE && currentSol.action == Action.INSERT) {
bestMatch = currentMatch;
}
}
if (currentMatchFixesSize < bestMatchFixesSize) {
bestMatch = currentMatch;
}
}
return getFinalResult(currentMatches, bestMatch);
}
/**
* Combine a given result with the current results, and get the final result.
*
* @param currentMatches Matches found so far
* @param bestMatch Result found in the sub-tree, that requires to be merged with the current results
* @return Final result
*/
private Result getFinalResult(int currentMatches, Result bestMatch) {
bestMatch.matches += currentMatches;
return bestMatch;
}
/**
* <p>
* Fix the error at the current position and continue forward to find the best path. This method
* tries to fix the parser error using following steps:
* <ol>
* <li>
* Insert a token and see how far the parser can proceed.
* </li>
* <li>
* Delete a token and see how far the parser can proceed.
* </li>
* </ol>
*
* Then decides the best action to perform (whether to insert or remove a token), using the result
* of the above two steps, based on the following criteria:
* <ol>
* <li>
* Pick the solution with the longest matching sequence.
* </li>
* <li>
* If there's a tie, then check for the solution which requires the lowest number of 'fixes'.
* </li>
* <li>
* If there's still a tie, then give priority to 'insertion', as that does not require removing
* input the user has given.
* </li>
* </ol>
* </p>
*
* @param currentCtx Current parser context
* @param lookahead Position of the next token to consider, relative to the position of the original error
* @param currentDepth Amount of distance traveled so far
* @return Recovery result
*/
private Result fixAndContinue(ParserRuleContext currentCtx, int lookahead, int currentDepth) {
Result deletionResult = seekMatchInSubTree(currentCtx, lookahead + 1, currentDepth);
ParserRuleContext nextCtx = getNextRule(currentCtx, lookahead);
Result insertionResult = seekMatchInSubTree(nextCtx, lookahead, currentDepth);
Result fixedPathResult;
Solution action;
if (insertionResult.matches == 0 && deletionResult.matches == 0) {
fixedPathResult = insertionResult;
} else if (insertionResult.matches == deletionResult.matches) {
if (insertionResult.fixes.size() <= deletionResult.fixes.size()) {
action = new Solution(Action.INSERT, currentCtx, getExpectedTokenKind(currentCtx),
currentCtx.toString());
insertionResult.fixes.push(action);
fixedPathResult = insertionResult;
} else {
STToken token = this.tokenReader.peek(lookahead);
action = new Solution(Action.REMOVE, currentCtx, token.kind, token.toString());
deletionResult.fixes.push(action);
fixedPathResult = deletionResult;
}
} else if (insertionResult.matches > deletionResult.matches) {
action = new Solution(Action.INSERT, currentCtx, getExpectedTokenKind(currentCtx), currentCtx.toString());
insertionResult.fixes.push(action);
fixedPathResult = insertionResult;
} else {
STToken token = this.tokenReader.peek(lookahead);
action = new Solution(Action.REMOVE, currentCtx, token.kind, token.toString());
deletionResult.fixes.push(action);
fixedPathResult = deletionResult;
}
return fixedPathResult;
}
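// Illustrative sketch only, not part of the original error handler: the three selection
// criteria described in the javadoc above, reduced to plain integers. The method name and
// parameters are hypothetical; the real handler compares the Result objects produced by the
// insertion and deletion lookaheads.
private static boolean preferInsertionOverRemoval(int insertionMatches, int insertionFixes,
                                                  int removalMatches, int removalFixes) {
    if (insertionMatches != removalMatches) {
        return insertionMatches > removalMatches; // 1. longest matching sequence wins
    }
    if (insertionFixes != removalFixes) {
        return insertionFixes < removalFixes; // 2. fewer fixes wins
    }
    return true; // 3. full tie: prefer insertion, which keeps the user's input
}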
/**
* Get the next parser rule/context given the current parser context.
*
* @param currentCtx Current parser context
* @param nextLookahead Position of the next token to consider, relative to the position of the original error
* @return Next parser context
*/
/**
* Get the next parser context to visit after a parameter type.
*
* @return Next parser context
*/
private ParserRuleContext getNextRuleForParamType() {
ParserRuleContext parentCtx;
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.REQUIRED_PARAM || parentCtx == ParserRuleContext.DEFAULTABLE_PARAM) {
return ParserRuleContext.VARIABLE_NAME;
} else if (parentCtx == ParserRuleContext.REST_PARAM) {
return ParserRuleContext.ELLIPSIS;
} else {
throw new IllegalStateException();
}
}
/**
* Get the next parser context to visit after a comma.
*
* @return Next parser context
*/
private ParserRuleContext getNextRuleForComma() {
ParserRuleContext parentCtx = getParentContext();
switch (parentCtx) {
case PARAM_LIST:
case REQUIRED_PARAM:
case DEFAULTABLE_PARAM:
case REST_PARAM:
endContext();
return parentCtx;
case ARG:
return parentCtx;
case MAPPING_CONSTRUCTOR:
return ParserRuleContext.MAPPING_FIELD;
case LISTENERS_LIST:
return ParserRuleContext.EXPRESSION;
default:
throw new IllegalStateException();
}
}
/**
* Get the next parser context to visit after a type descriptor.
*
* @return Next parser context
*/
private ParserRuleContext getNextRuleForTypeDescriptor() {
ParserRuleContext parentCtx = getParentContext();
switch (parentCtx) {
case RECORD_FIELD:
case OBJECT_MEMBER:
case LISTENER_DECL:
case CONSTANT_DECL:
return ParserRuleContext.VARIABLE_NAME;
case MODULE_TYPE_DEFINITION:
return ParserRuleContext.SEMICOLON;
case RETURN_TYPE_DESCRIPTOR:
return ParserRuleContext.FUNC_BODY;
case OPTIONAL_TYPE_DESCRIPTOR:
return ParserRuleContext.QUESTION_MARK;
case IS_EXPRESSION:
endContext();
return ParserRuleContext.EXPRESSION_RHS;
default:
if (isStatement(parentCtx) || isParameter(parentCtx)) {
return ParserRuleContext.VARIABLE_NAME;
}
}
throw new IllegalStateException();
}
/**
* Get the next parser context to visit after an equal operator.
*
* @return Next parser context
*/
private ParserRuleContext getNextRuleForEqualOp() {
ParserRuleContext parentCtx = getParentContext();
switch (parentCtx) {
case EXTERNAL_FUNC_BODY:
return ParserRuleContext.EXTERNAL_KEYWORD;
case REQUIRED_PARAM:
case DEFAULTABLE_PARAM:
case RECORD_FIELD:
case ARG:
case OBJECT_MEMBER:
case LISTENER_DECL:
case CONSTANT_DECL:
return ParserRuleContext.EXPRESSION;
default:
if (isStatement(parentCtx)) {
return ParserRuleContext.EXPRESSION;
}
throw new IllegalStateException();
}
}
/**
* Get the next parser context to visit after a closing brace.
*
* @param nextLookahead Position of the next token to consider, relative to the position of the original error
* @return Next parser context
*/
private ParserRuleContext getNextRuleForCloseBrace(int nextLookahead) {
ParserRuleContext parentCtx = getParentContext();
switch (parentCtx) {
case FUNC_BODY_BLOCK:
endContext();
endContext();
STToken nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
return ParserRuleContext.EOF;
}
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.SERVICE_DECL) {
return ParserRuleContext.RESOURCE_DEF;
} else if (parentCtx == ParserRuleContext.OBJECT_TYPE_DESCRIPTOR) {
return ParserRuleContext.OBJECT_MEMBER;
} else {
return ParserRuleContext.TOP_LEVEL_NODE;
}
case SERVICE_DECL:
endContext();
nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
return ParserRuleContext.EOF;
}
return ParserRuleContext.TOP_LEVEL_NODE;
case OBJECT_MEMBER:
endContext();
case RECORD_TYPE_DESCRIPTOR:
case OBJECT_TYPE_DESCRIPTOR:
endContext();
return getNextRuleForTypeDescriptor();
case BLOCK_STMT:
endContext();
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.IF_BLOCK) {
endContext();
return ParserRuleContext.ELSE_BLOCK;
} else if (parentCtx == ParserRuleContext.WHILE_BLOCK) {
endContext();
return ParserRuleContext.STATEMENT;
}
return ParserRuleContext.STATEMENT;
case MAPPING_CONSTRUCTOR:
endContext();
parentCtx = getParentContext();
if (parentCtx != ParserRuleContext.ANNOTATIONS) {
return ParserRuleContext.EXPRESSION_RHS;
}
nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.AT_TOKEN) {
return ParserRuleContext.AT;
}
endContext();
parentCtx = getParentContext();
switch (parentCtx) {
case COMP_UNIT:
return ParserRuleContext.TOP_LEVEL_NODE_WITHOUT_METADATA;
case RETURN_TYPE_DESCRIPTOR:
return ParserRuleContext.TYPE_DESCRIPTOR;
case RECORD_FIELD:
return ParserRuleContext.RECORD_FIELD_WITHOUT_METADATA;
case OBJECT_MEMBER:
return ParserRuleContext.OBJECT_MEMBER_WITHOUT_METADATA;
case SERVICE_DECL:
return ParserRuleContext.RESOURCE_DEF;
case FUNC_BODY_BLOCK:
return ParserRuleContext.STATEMENT_WITHOUT_ANNOTS;
case EXTERNAL_FUNC_BODY:
return ParserRuleContext.EXTERNAL_KEYWORD;
default:
if (isParameter(parentCtx)) {
return ParserRuleContext.REQUIRED_PARAM;
}
throw new IllegalStateException("annotation is ending inside a " + parentCtx);
}
default:
throw new IllegalStateException("found close-brace in: " + parentCtx);
}
}
/**
* Get the next parser context to visit after a variable/parameter name.
*
* @param nextLookahead Position of the next token to consider, relative to the position of the original error
* @return Next parser context
*/
private ParserRuleContext getNextRuleForVarName(int nextLookahead) {
STToken nextToken = this.tokenReader.peek(nextLookahead);
ParserRuleContext parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.REQUIRED_PARAM) {
if (isEndOfParametersList(nextToken)) {
return ParserRuleContext.CLOSE_PARENTHESIS;
} else if (isEndOfParameter(nextToken)) {
return ParserRuleContext.COMMA;
} else {
switchContext(ParserRuleContext.DEFAULTABLE_PARAM);
if (isCompoundBinaryOperator(nextToken.kind)) {
return ParserRuleContext.COMPOUND_BINARY_OPERATOR;
} else {
return ParserRuleContext.ASSIGN_OP;
}
}
} else if (parentCtx == ParserRuleContext.DEFAULTABLE_PARAM) {
if (isEndOfParametersList(nextToken)) {
return ParserRuleContext.CLOSE_PARENTHESIS;
} else {
return ParserRuleContext.ASSIGN_OP;
}
} else if (isStatement(parentCtx) || parentCtx == ParserRuleContext.LISTENER_DECL ||
parentCtx == ParserRuleContext.CONSTANT_DECL) {
return ParserRuleContext.VAR_DECL_STMT_RHS;
} else if (parentCtx == ParserRuleContext.RECORD_FIELD) {
return ParserRuleContext.FIELD_DESCRIPTOR_RHS;
} else if (parentCtx == ParserRuleContext.ARG) {
return ParserRuleContext.NAMED_OR_POSITIONAL_ARG_RHS;
} else if (parentCtx == ParserRuleContext.OBJECT_MEMBER) {
return ParserRuleContext.OBJECT_FIELD_RHS;
} else {
throw new IllegalStateException();
}
}
/**
* Check whether the given token kind is a compound binary operator.
*
* @param kind STToken kind
* @return <code>true</code> if the token kind refers to a binary operator. <code>false</code> otherwise
*/
private boolean isCompoundBinaryOperator(SyntaxKind kind) {
switch (kind) {
case PLUS_TOKEN:
case MINUS_TOKEN:
case SLASH_TOKEN:
case ASTERISK_TOKEN:
return true;
default:
return false;
}
}
/**
* Get the next parser context to visit after a semicolon.
*
* @param nextLookahead Position of the next token to consider, relative to the position of the original error
* @return Next parser context
*/
private ParserRuleContext getNextRuleForSemicolon(int nextLookahead) {
STToken nextToken;
ParserRuleContext parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.EXTERNAL_FUNC_BODY) {
endContext();
endContext();
nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
return ParserRuleContext.EOF;
}
return ParserRuleContext.TOP_LEVEL_NODE;
} else if (isExpression(parentCtx)) {
endContext();
if (isEndOfBlock(this.tokenReader.peek(nextLookahead))) {
return ParserRuleContext.CLOSE_BRACE;
}
return ParserRuleContext.STATEMENT;
} else if (parentCtx == ParserRuleContext.VAR_DECL_STMT) {
endContext();
parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.COMP_UNIT) {
return ParserRuleContext.TOP_LEVEL_NODE;
}
return ParserRuleContext.STATEMENT;
} else if (isStatement(parentCtx)) {
endContext();
if (isEndOfBlock(this.tokenReader.peek(nextLookahead))) {
return ParserRuleContext.CLOSE_BRACE;
}
return ParserRuleContext.STATEMENT;
} else if (parentCtx == ParserRuleContext.RECORD_FIELD) {
if (isEndOfBlock(this.tokenReader.peek(nextLookahead))) {
endContext();
return ParserRuleContext.RECORD_BODY_END;
}
return ParserRuleContext.RECORD_FIELD;
} else if (parentCtx == ParserRuleContext.MODULE_TYPE_DEFINITION ||
parentCtx == ParserRuleContext.LISTENER_DECL || parentCtx == ParserRuleContext.CONSTANT_DECL) {
endContext();
nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
return ParserRuleContext.EOF;
}
return ParserRuleContext.TOP_LEVEL_NODE;
} else if (parentCtx == ParserRuleContext.OBJECT_MEMBER) {
if (isEndOfObjectTypeNode(nextLookahead)) {
endContext();
return ParserRuleContext.CLOSE_BRACE;
}
return ParserRuleContext.OBJECT_MEMBER;
} else if (parentCtx == ParserRuleContext.IMPORT_DECL) {
endContext();
nextToken = this.tokenReader.peek(nextLookahead);
if (nextToken.kind == SyntaxKind.EOF_TOKEN) {
return ParserRuleContext.EOF;
}
return ParserRuleContext.TOP_LEVEL_NODE;
} else {
throw new IllegalStateException();
}
}
private ParserRuleContext getNextRuleForDot() {
ParserRuleContext parentCtx = getParentContext();
if (parentCtx == ParserRuleContext.IMPORT_DECL) {
return ParserRuleContext.IMPORT_MODULE_NAME;
}
return ParserRuleContext.FIELD_OR_FUNC_NAME;
}
/**
* Get the next parser context to visit after a question mark.
*
* @return Next parser context
*/
private ParserRuleContext getNextRuleForQuestionMark() {
ParserRuleContext parentCtx = getParentContext();
switch (parentCtx) {
case OPTIONAL_TYPE_DESCRIPTOR:
endContext();
parentCtx = getParentContext();
switch (parentCtx) {
case MODULE_TYPE_DEFINITION:
return ParserRuleContext.SEMICOLON;
case RETURN_TYPE_DESCRIPTOR:
return ParserRuleContext.FUNC_BODY;
default:
return ParserRuleContext.VARIABLE_NAME;
}
default:
return ParserRuleContext.SEMICOLON;
}
}
/**
* Check whether the given context is a statement.
*
* @param ctx Parser context to check
* @return <code>true</code> if the given context is a statement. <code>false</code> otherwise
*/
private boolean isStatement(ParserRuleContext parentCtx) {
switch (parentCtx) {
case STATEMENT:
case STATEMENT_WITHOUT_ANNOTS:
case VAR_DECL_STMT:
case ASSIGNMENT_STMT:
case ASSIGNMENT_OR_VAR_DECL_STMT:
case IF_BLOCK:
case BLOCK_STMT:
case WHILE_BLOCK:
case CALL_STMT:
case PANIC_STMT:
case CONTINUE_STATEMENT:
case BREAK_STATEMENT:
case RETURN_STMT:
case COMPOUND_ASSIGNMENT_STMT:
return true;
default:
return false;
}
}
/**
* Check whether the given context is an expression.
*
* @param ctx Parser context to check
* @return <code>true</code> if the given context is an expression. <code>false</code> otherwise
*/
private boolean isExpression(ParserRuleContext ctx) {
return ctx == ParserRuleContext.EXPRESSION;
}
/**
* Check whether the given token refers to a binary operator.
*
* @param token Token to check
* @return <code>true</code> if the given token refers to a binary operator. <code>false</code> otherwise
*/
private boolean isBinaryOperator(STToken token) {
switch (token.kind) {
case PLUS_TOKEN:
case MINUS_TOKEN:
case SLASH_TOKEN:
case ASTERISK_TOKEN:
case GT_TOKEN:
case LT_TOKEN:
case EQUAL_GT_TOKEN:
case DOUBLE_EQUAL_TOKEN:
case TRIPPLE_EQUAL_TOKEN:
case LT_EQUAL_TOKEN:
case GT_EQUAL_TOKEN:
case NOT_EQUAL_TOKEN:
case NOT_DOUBLE_EQUAL_TOKEN:
case BITWISE_AND_TOKEN:
case BITWISE_XOR_TOKEN:
case PIPE_TOKEN:
case LOGICAL_AND_TOKEN:
case LOGICAL_OR_TOKEN:
return true;
default:
return false;
}
}
private boolean isParameter(ParserRuleContext ctx) {
switch (ctx) {
case REQUIRED_PARAM:
case DEFAULTABLE_PARAM:
case REST_PARAM:
return true;
default:
return false;
}
}
/**
* Get the expected token kind at the given parser rule context. If the parser rule is a terminal,
* then the corresponding terminal token kind is returned. If the parser rule is a production,
* then {@link SyntaxKind#NONE} is returned.
*
* @param ctx Parser rule context
* @return Token kind expected at the given parser rule
*/
private SyntaxKind getExpectedTokenKind(ParserRuleContext ctx) {
switch (ctx) {
case ASSIGN_OP:
return SyntaxKind.EQUAL_TOKEN;
case BINARY_OPERATOR:
return SyntaxKind.PLUS_TOKEN;
case CLOSE_BRACE:
return SyntaxKind.CLOSE_BRACE_TOKEN;
case CLOSE_PARENTHESIS:
return SyntaxKind.CLOSE_PAREN_TOKEN;
case COMMA:
return SyntaxKind.COMMA_TOKEN;
case EXTERNAL_KEYWORD:
return SyntaxKind.EXTERNAL_KEYWORD;
case FUNCTION_KEYWORD:
return SyntaxKind.FUNCTION_KEYWORD;
case FUNC_NAME:
return SyntaxKind.IDENTIFIER_TOKEN;
case OPEN_BRACE:
return SyntaxKind.OPEN_BRACE_TOKEN;
case OPEN_PARENTHESIS:
return SyntaxKind.OPEN_PAREN_TOKEN;
case RETURN_TYPE_DESCRIPTOR:
case RETURNS_KEYWORD:
return SyntaxKind.RETURNS_KEYWORD;
case SEMICOLON:
return SyntaxKind.SEMICOLON_TOKEN;
case VARIABLE_NAME:
case STATEMENT_START_IDENTIFIER:
return SyntaxKind.IDENTIFIER_TOKEN;
case PUBLIC_KEYWORD:
return SyntaxKind.PUBLIC_KEYWORD;
case SIMPLE_TYPE_DESCRIPTOR:
return SyntaxKind.SIMPLE_TYPE;
case ASSIGNMENT_STMT:
return SyntaxKind.IDENTIFIER_TOKEN;
case EXPRESSION_RHS:
return SyntaxKind.PLUS_TOKEN;
case EXPRESSION:
return SyntaxKind.IDENTIFIER_TOKEN;
case EXTERNAL_FUNC_BODY:
return SyntaxKind.EQUAL_TOKEN;
case FUNC_BODY:
case FUNC_BODY_BLOCK:
return SyntaxKind.OPEN_BRACE_TOKEN;
case FUNC_DEFINITION:
return SyntaxKind.FUNCTION_KEYWORD;
case REQUIRED_PARAM:
return SyntaxKind.SIMPLE_TYPE;
case VAR_DECL_STMT:
return SyntaxKind.SIMPLE_TYPE;
case VAR_DECL_STMT_RHS:
return SyntaxKind.SEMICOLON_TOKEN;
case ASSIGNMENT_OR_VAR_DECL_STMT:
return SyntaxKind.SIMPLE_TYPE;
case DEFAULTABLE_PARAM:
return SyntaxKind.SIMPLE_TYPE;
case REST_PARAM:
return SyntaxKind.SIMPLE_TYPE;
case ASTERISK:
return SyntaxKind.ASTERISK_TOKEN;
case CLOSED_RECORD_BODY_END:
return SyntaxKind.CLOSE_BRACE_PIPE_TOKEN;
case CLOSED_RECORD_BODY_START:
return SyntaxKind.OPEN_BRACE_PIPE_TOKEN;
case ELLIPSIS:
return SyntaxKind.ELLIPSIS_TOKEN;
case QUESTION_MARK:
return SyntaxKind.QUESTION_MARK_TOKEN;
case RECORD_BODY_START:
return SyntaxKind.OPEN_BRACE_PIPE_TOKEN;
case RECORD_FIELD:
case RECORD_KEYWORD:
return SyntaxKind.RECORD_KEYWORD;
case TYPE_KEYWORD:
return SyntaxKind.TYPE_KEYWORD;
case TYPE_NAME:
return SyntaxKind.IDENTIFIER_TOKEN;
case TYPE_REFERENCE:
return SyntaxKind.IDENTIFIER_TOKEN;
case RECORD_BODY_END:
return SyntaxKind.CLOSE_BRACE_TOKEN;
case OBJECT_KEYWORD:
return SyntaxKind.OBJECT_KEYWORD;
case PRIVATE_KEYWORD:
return SyntaxKind.PRIVATE_KEYWORD;
case REMOTE_KEYWORD:
return SyntaxKind.REMOTE_KEYWORD;
case OBJECT_FIELD_RHS:
return SyntaxKind.SEMICOLON_TOKEN;
case ABSTRACT_KEYWORD:
return SyntaxKind.ABSTRACT_KEYWORD;
case CLIENT_KEYWORD:
return SyntaxKind.CLIENT_KEYWORD;
case OBJECT_TYPE_FIRST_QUALIFIER:
case OBJECT_TYPE_SECOND_QUALIFIER:
return SyntaxKind.OBJECT_KEYWORD;
case CLOSE_BRACKET:
return SyntaxKind.CLOSE_BRACKET_TOKEN;
case DOT:
return SyntaxKind.DOT_TOKEN;
case FIELD_OR_FUNC_NAME:
return SyntaxKind.IDENTIFIER_TOKEN;
case OPEN_BRACKET:
return SyntaxKind.OPEN_BRACKET_TOKEN;
case IF_KEYWORD:
return SyntaxKind.IF_KEYWORD;
case ELSE_KEYWORD:
return SyntaxKind.ELSE_KEYWORD;
case WHILE_KEYWORD:
return SyntaxKind.WHILE_KEYWORD;
case CHECKING_KEYWORD:
return SyntaxKind.CHECK_KEYWORD;
case AS_KEYWORD:
return SyntaxKind.AS_KEYWORD;
case BOOLEAN_LITERAL:
return SyntaxKind.TRUE_KEYWORD;
case IMPORT_KEYWORD:
return SyntaxKind.IMPORT_KEYWORD;
case IMPORT_MODULE_NAME:
case IMPORT_ORG_OR_MODULE_NAME:
case IMPORT_PREFIX:
case VARIABLE_REF:
case BASIC_LITERAL:
case SERVICE_NAME:
case IDENTIFIER:
case QUALIFIED_IDENTIFIER:
return SyntaxKind.IDENTIFIER_TOKEN;
case VERSION_NUMBER:
case MAJOR_VERSION:
case MINOR_VERSION:
case PATCH_VERSION:
return SyntaxKind.DECIMAL_INTEGER_LITERAL;
case SLASH:
return SyntaxKind.SLASH_TOKEN;
case VERSION_KEYWORD:
return SyntaxKind.VERSION_KEYWORD;
case IMPORT_DECL_RHS:
return SyntaxKind.SEMICOLON_TOKEN;
case IMPORT_SUB_VERSION:
return SyntaxKind.SEMICOLON_TOKEN;
case COLON:
return SyntaxKind.COLON_TOKEN;
case MAPPING_FIELD_NAME:
case MAPPING_FIELD:
return SyntaxKind.IDENTIFIER_TOKEN;
case PANIC_KEYWORD:
return SyntaxKind.PANIC_KEYWORD;
case STRING_LITERAL:
return SyntaxKind.STRING_LITERAL;
case ON_KEYWORD:
return SyntaxKind.ON_KEYWORD;
case RESOURCE_KEYWORD:
return SyntaxKind.RESOURCE_KEYWORD;
case RETURN_KEYWORD:
return SyntaxKind.RETURN_KEYWORD;
case SERVICE_KEYWORD:
return SyntaxKind.SERVICE_KEYWORD;
case BREAK_KEYWORD:
return SyntaxKind.BREAK_KEYWORD;
case LISTENER_KEYWORD:
return SyntaxKind.CONST_KEYWORD;
case CONTINUE_KEYWORD:
return SyntaxKind.CONTINUE_KEYWORD;
case CONST_KEYWORD:
return SyntaxKind.CONST_KEYWORD;
case FINAL_KEYWORD:
return SyntaxKind.FINAL_KEYWORD;
case CONST_DECL_TYPE:
return SyntaxKind.IDENTIFIER_TOKEN;
case NIL_TYPE_DESCRIPTOR:
return SyntaxKind.NIL_TYPE;
case TYPEOF_KEYWORD:
return SyntaxKind.TYPEOF_KEYWORD;
case OPTIONAL_TYPE_DESCRIPTOR:
return SyntaxKind.OPTIONAL_TYPE;
case UNARY_OPERATOR:
return SyntaxKind.PLUS_TOKEN;
case AT:
return SyntaxKind.AT_TOKEN;
case FIELD_DESCRIPTOR_RHS:
return SyntaxKind.SEMICOLON_TOKEN;
case AFTER_PARAMETER_TYPE:
return SyntaxKind.IDENTIFIER_TOKEN;
case CONST_DECL_RHS:
return SyntaxKind.EQUAL_TOKEN;
case IS_KEYWORD:
return SyntaxKind.IS_KEYWORD;
case TYPE_DESCRIPTOR:
return SyntaxKind.SIMPLE_TYPE;
case COMP_UNIT:
case TOP_LEVEL_NODE:
case TOP_LEVEL_NODE_WITHOUT_METADATA:
case TOP_LEVEL_NODE_WITHOUT_MODIFIER:
case ANNOTATIONS:
case PARAM_LIST:
case PARAMETER_RHS:
case STATEMENT:
case STATEMENT_WITHOUT_ANNOTS:
case FIELD_OR_REST_DESCIPTOR_RHS:
case MODULE_TYPE_DEFINITION:
case RECORD_TYPE_DESCRIPTOR:
case ARG:
case ARG_LIST:
case EOF:
case FUNC_CALL:
case NAMED_OR_POSITIONAL_ARG_RHS:
case OBJECT_FUNC_OR_FIELD:
case OBJECT_FUNC_OR_FIELD_WITHOUT_VISIBILITY:
case OBJECT_MEMBER:
case OBJECT_METHOD_START:
case OBJECT_TYPE_DESCRIPTOR:
case OBJECT_TYPE_DESCRIPTOR_START:
case AFTER_IMPORT_MODULE_NAME:
case ASSIGNMENT_OR_VAR_DECL_STMT_RHS:
case BLOCK_STMT:
case CALL_STMT:
case CALL_STMT_START:
case DECIMAL_INTEGER_LITERAL:
case ELSE_BLOCK:
case ELSE_BODY:
case IF_BLOCK:
case IMPORT_DECL:
case IMPORT_PREFIX_DECL:
case MAJOR_MINOR_VERSION_END:
case WHILE_BLOCK:
case ACCESS_EXPRESSION:
case IMPORT_VERSION_DECL:
case MAPPING_CONSTRUCTOR:
case PANIC_STMT:
case SPECIFIC_FIELD_RHS:
case COMPUTED_FIELD_NAME:
case LISTENERS_LIST:
case RESOURCE_DEF:
case RETURN_STMT:
case RETURN_STMT_RHS:
case SERVICE_DECL:
case OPTIONAL_SERVICE_NAME:
case BREAK_STATEMENT:
case CONTINUE_STATEMENT:
case LISTENER_DECL:
case CONSTANT_DECL:
case ANNOT_REFERENCE:
case DOC_STRING:
case OBJECT_MEMBER_WITHOUT_METADATA:
case IS_EXPRESSION:
default:
break;
}
return SyntaxKind.NONE;
}
/**
* Check whether a token kind is a basic literal.
*
* @param kind Token kind to check
* @return <code>true</code> if the given token kind belongs to a basic literal.<code>false</code> otherwise
*/
private boolean isBasicLiteral(SyntaxKind kind) {
switch (kind) {
case DECIMAL_INTEGER_LITERAL:
case HEX_INTEGER_LITERAL:
case STRING_LITERAL:
case TRUE_KEYWORD:
case FALSE_KEYWORD:
return true;
default:
return false;
}
}
/**
* Check whether the given token refers to a unary operator.
*
* @param token Token to check
* @return <code>true</code> if the given token refers to a unary operator. <code>false</code> otherwise
*/
private boolean isUnaryOperator(STToken token) {
switch (token.kind) {
case PLUS_TOKEN:
case MINUS_TOKEN:
case NEGATION_TOKEN:
case EXCLAMATION_MARK_TOKEN:
return true;
default:
return false;
}
}
/**
* Search for matching token sequences within an <code>is</code> expression and return the optimal solution.
*
* @param currentCtx Current context
* @param lookahead Position of the next token to consider, relative to the position of the original error
* @param currentDepth Amount of distance traveled so far
* @param currentMatches Matching tokens found so far
* @return Recovery result
*/
private Result seekInIsExpression(ParserRuleContext currentCtx, int lookahead, int currentDepth,
int currentMatches) {
STToken nextToken = this.tokenReader.peek(lookahead);
currentDepth++;
if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
Result fixedPathResult = fixAndContinue(currentCtx, lookahead, currentDepth);
return getFinalResult(currentMatches, fixedPathResult);
}
ParserRuleContext nextContext;
STToken nextNextToken = this.tokenReader.peek(lookahead + 1);
switch (nextNextToken.kind) {
case IS_KEYWORD:
startContext(ParserRuleContext.IS_EXPRESSION);
nextContext = ParserRuleContext.IS_KEYWORD;
break;
default:
nextContext = ParserRuleContext.EXPRESSION_RHS;
break;
}
currentMatches++;
lookahead++;
Result result = seekMatch(nextContext, lookahead, currentDepth);
result.ctx = currentCtx;
return getFinalResult(currentMatches, result);
}
public ParserRuleContext findBestPath(ParserRuleContext context) {
int prevLookahead = lookaheadLimit;
lookaheadLimit = (int) (lookaheadLimit * 1.5);
ParserRuleContext[] alternatives;
switch (context) {
case STATEMENT:
alternatives = STATEMENTS;
break;
case TOP_LEVEL_NODE:
alternatives = TOP_LEVEL_NODE;
break;
case OBJECT_MEMBER:
alternatives = OBJECT_MEMBER_START;
break;
default:
throw new IllegalStateException();
}
Result result = seekInAlternativesPaths(1, 0, 0, alternatives);
lookaheadLimit = prevLookahead;
return result.ctx;
}
/**
* Represents a solution/fix for a parser error. A {@link Solution} consists of the parser context where the error
* was encountered, the enclosing parser context at the same point, the token with the error, and the {@link Action}
* required to recover from the error.
*
* @since 1.2.0
*/
public static class Solution {
public ParserRuleContext ctx;
public Action action;
public String tokenText;
public SyntaxKind tokenKind;
public STNode recoveredNode;
public Solution(Action action, ParserRuleContext ctx, SyntaxKind tokenKind, String tokenText) {
this.action = action;
this.ctx = ctx;
this.tokenText = tokenText;
this.tokenKind = tokenKind;
}
@Override
public String toString() {
return action.toString() + "'" + tokenText + "'";
}
}
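// Illustrative sketch only, not part of the original error handler: how a single recovery
// fix is represented. For a missing semicolon, the handler effectively records an INSERT
// action against the SEMICOLON context together with the token kind that should be added.
// The method name below is hypothetical.
private static Solution exampleMissingSemicolonFix() {
    return new Solution(Action.INSERT, ParserRuleContext.SEMICOLON,
            SyntaxKind.SEMICOLON_TOKEN, ParserRuleContext.SEMICOLON.toString());
}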
/**
* Represents the result of a token-sequence search in a sub-tree. The result contains the fixes required to
* traverse that sub-tree, and the number of matching tokens it found, excluding the fixed tokens.
*/
public static class Result {
private int matches;
private ArrayDeque<Solution> fixes;
/**
* Represents the end solution to be applied to the next immediate token, to recover from the error.
* If the solution is to insert/remove the next immediate token, then this is equivalent to
* <code>fixes.peek()</code>. Else, if the solution is to insert/remove a token that is not the
* immediate next token, then this will have a solution with {@link Action#KEEP}.
*/
private Solution solution;
private ParserRuleContext ctx;
public Result(ArrayDeque<Solution> fixes, int matches, ParserRuleContext ctx) {
this.fixes = fixes;
this.matches = matches;
this.ctx = ctx;
}
}
/**
* Represents the actions that can be taken to recover from a parser error.
*
* @since 1.2.0
*/
enum Action {
INSERT, REMOVE, KEEP;
}
} |
When maxScaleCount is set to a non-zero value, it's ok to be greedy because there's a cap to how much can be assigned to one CFP instance. I agree that the system might still go through a couple of rounds of load balancing itself, but much less than when no cap is set. The idea here is that as new instances are introduced into the system, they should start the work immediately on all the partitions that are available rather than only taking one at a time. | public List<Lease> selectLeasesToTake(List<Lease> allLeases) {
Map<String, Integer> workerToPartitionCount = new HashMap<>();
List<Lease> expiredLeases = new ArrayList<>();
Map<String, Lease> allPartitions = new HashMap<>();
this.categorizeLeases(allLeases, allPartitions, expiredLeases, workerToPartitionCount);
int partitionCount = allPartitions.size();
int workerCount = workerToPartitionCount.size();
if (partitionCount <= 0) {
return new ArrayList<Lease>();
}
int target = this.calculateTargetPartitionCount(partitionCount, workerCount);
int myCount = workerToPartitionCount.get(this.hostName);
int partitionsNeededForMe = target - myCount;
if (expiredLeases.size() > 0) {
if (this.maxPartitionCount == 0 && (partitionsNeededForMe <= 0 || (partitionsNeededForMe > 1 && workerToPartitionCount.size() > 1))) {
partitionsNeededForMe = 1;
}
for (Lease lease : expiredLeases) {
this.logger.info("Found unused or expired lease {}; previous lease count for instance owner {} is {}, count of leases to target is {} and maxScaleCount {} ",
lease.getLeaseToken(), this.hostName, myCount, partitionsNeededForMe, this.maxPartitionCount);
}
return expiredLeases.subList(0, Math.min(partitionsNeededForMe, expiredLeases.size()));
}
if (partitionsNeededForMe <= 0)
return new ArrayList<Lease>();
Lease stolenLease = getLeaseToSteal(workerToPartitionCount, target, partitionsNeededForMe, allPartitions);
List<Lease> stolenLeases = new ArrayList<>();
if (stolenLease != null) {
stolenLeases.add(stolenLease);
}
return stolenLeases;
} | if (this.maxPartitionCount == 0 && (partitionsNeededForMe <= 0 || (partitionsNeededForMe > 1 && workerToPartitionCount.size() > 1))) { | public List<Lease> selectLeasesToTake(List<Lease> allLeases) {
Map<String, Integer> workerToPartitionCount = new HashMap<>();
List<Lease> expiredLeases = new ArrayList<>();
Map<String, Lease> allPartitions = new HashMap<>();
this.categorizeLeases(allLeases, allPartitions, expiredLeases, workerToPartitionCount);
int partitionCount = allPartitions.size();
int workerCount = workerToPartitionCount.size();
if (partitionCount <= 0) {
return new ArrayList<Lease>();
}
int target = this.calculateTargetPartitionCount(partitionCount, workerCount);
int myCount = workerToPartitionCount.get(this.hostName);
int partitionsNeededForMe = target - myCount;
if (expiredLeases.size() > 0) {
if ((this.maxPartitionCount == 0 && partitionsNeededForMe <= 0) || (partitionsNeededForMe > 1 && workerToPartitionCount.size() > 1)) {
partitionsNeededForMe = 1;
}
if (partitionsNeededForMe == 1) {
Random random = new Random();
Lease expiredLease = expiredLeases.get(random.nextInt(expiredLeases.size()));
this.logger.info("Found unused or expired lease {} (owner was {}); previous lease count for instance owner {} is {}, count of leases to target is {} and maxScaleCount {} ",
expiredLease.getLeaseToken(), expiredLease.getOwner(), this.hostName, myCount, partitionsNeededForMe, this.maxPartitionCount);
return Collections.singletonList(expiredLease);
} else {
for (Lease lease : expiredLeases) {
this.logger.info("Found unused or expired lease {} (owner was {}); previous lease count for instance owner {} is {} and maxScaleCount {} ",
lease.getLeaseToken(), lease.getOwner(), this.hostName, myCount, this.maxPartitionCount);
}
}
return expiredLeases.subList(0, Math.min(partitionsNeededForMe, expiredLeases.size()));
}
if (partitionsNeededForMe <= 0)
return new ArrayList<Lease>();
Lease stolenLease = getLeaseToSteal(workerToPartitionCount, target, partitionsNeededForMe, allPartitions);
List<Lease> stolenLeases = new ArrayList<>();
if (stolenLease != null) {
stolenLeases.add(stolenLease);
}
return stolenLeases;
} | class EqualPartitionsBalancingStrategy implements PartitionLoadBalancingStrategy {
private final Logger logger = LoggerFactory.getLogger(EqualPartitionsBalancingStrategy.class);
private final String hostName;
private final int minPartitionCount;
private final int maxPartitionCount;
private final Duration leaseExpirationInterval;
public EqualPartitionsBalancingStrategy(String hostName, int minPartitionCount, int maxPartitionCount, Duration leaseExpirationInterval) {
if (hostName == null) {
throw new IllegalArgumentException("hostName");
}
this.hostName = hostName;
this.minPartitionCount = minPartitionCount;
this.maxPartitionCount = maxPartitionCount;
this.leaseExpirationInterval = leaseExpirationInterval;
}
@Override
private static Lease getLeaseToSteal(
Map<String, Integer> workerToPartitionCount,
int target,
int partitionsNeededForMe,
Map<String, Lease> allPartitions) {
Map.Entry<String, Integer> workerToStealFrom = findWorkerWithMostPartitions(workerToPartitionCount);
if (workerToStealFrom.getValue() > target - (partitionsNeededForMe > 1 ? 1 : 0)) {
for (Map.Entry<String, Lease> entry : allPartitions.entrySet()) {
if (entry.getValue().getOwner().equalsIgnoreCase(workerToStealFrom.getKey())) {
return entry.getValue();
}
}
}
return null;
}
private static Map.Entry<String, Integer> findWorkerWithMostPartitions(Map<String, Integer> workerToPartitionCount) {
Map.Entry<String, Integer> workerToStealFrom = new ChangeFeedHelper.KeyValuePair<>("", 0);
for (Map.Entry<String, Integer> entry : workerToPartitionCount.entrySet()) {
if (workerToStealFrom.getValue() <= entry.getValue()) {
workerToStealFrom = entry;
}
}
return workerToStealFrom;
}
private int calculateTargetPartitionCount(int partitionCount, int workerCount) {
int target = 1;
if (partitionCount > workerCount) {
target = (int)Math.ceil((double)partitionCount / workerCount);
}
if (this.maxPartitionCount > 0 && target > this.maxPartitionCount) {
target = this.maxPartitionCount;
}
if (this.minPartitionCount > 0 && target < this.minPartitionCount) {
target = this.minPartitionCount;
}
return target;
}
private void categorizeLeases(
List<Lease> allLeases,
Map<String, Lease> allPartitions,
List<Lease> expiredLeases,
Map<String, Integer> workerToPartitionCount) {
for (Lease lease : allLeases) {
allPartitions.put(lease.getLeaseToken(), lease);
if (lease.getOwner() == null || lease.getOwner().isEmpty() || this.isExpired(lease)) {
expiredLeases.add(lease);
} else {
String assignedTo = lease.getOwner();
Integer count = workerToPartitionCount.get(assignedTo);
if (count != null) {
workerToPartitionCount.replace(assignedTo, count + 1);
} else {
workerToPartitionCount.put(assignedTo, 1);
}
}
}
if (!workerToPartitionCount.containsKey(this.hostName)) {
workerToPartitionCount.put(this.hostName, 0);
}
}
private boolean isExpired(Lease lease) {
if (lease.getOwner() == null || lease.getOwner().isEmpty() || lease.getTimestamp() == null) {
return true;
}
Instant leaseExpireTime = Instant.parse(lease.getTimestamp()).plus(this.leaseExpirationInterval);
this.logger.debug("Current lease timestamp: {}, current time: {}", leaseExpireTime, Instant.now());
return leaseExpireTime.isBefore(Instant.now());
}
} | class EqualPartitionsBalancingStrategy implements PartitionLoadBalancingStrategy {
private final Logger logger = LoggerFactory.getLogger(EqualPartitionsBalancingStrategy.class);
private final String hostName;
private final int minPartitionCount;
private final int maxPartitionCount;
private final Duration leaseExpirationInterval;
public EqualPartitionsBalancingStrategy(String hostName, int minPartitionCount, int maxPartitionCount, Duration leaseExpirationInterval) {
if (hostName == null) {
throw new IllegalArgumentException("hostName");
}
this.hostName = hostName;
this.minPartitionCount = minPartitionCount;
this.maxPartitionCount = maxPartitionCount;
this.leaseExpirationInterval = leaseExpirationInterval;
}
@Override
private static Lease getLeaseToSteal(
Map<String, Integer> workerToPartitionCount,
int target,
int partitionsNeededForMe,
Map<String, Lease> allPartitions) {
Map.Entry<String, Integer> workerToStealFrom = findWorkerWithMostPartitions(workerToPartitionCount);
if (workerToStealFrom.getValue() > target - (partitionsNeededForMe > 1 ? 1 : 0)) {
for (Map.Entry<String, Lease> entry : allPartitions.entrySet()) {
if (entry.getValue().getOwner().equalsIgnoreCase(workerToStealFrom.getKey())) {
return entry.getValue();
}
}
}
return null;
}
private static Map.Entry<String, Integer> findWorkerWithMostPartitions(Map<String, Integer> workerToPartitionCount) {
Map.Entry<String, Integer> workerToStealFrom = new ChangeFeedHelper.KeyValuePair<>("", 0);
for (Map.Entry<String, Integer> entry : workerToPartitionCount.entrySet()) {
if (workerToStealFrom.getValue() <= entry.getValue()) {
workerToStealFrom = entry;
}
}
return workerToStealFrom;
}
private int calculateTargetPartitionCount(int partitionCount, int workerCount) {
int target = 1;
if (partitionCount > workerCount) {
target = (int)Math.ceil((double)partitionCount / workerCount);
}
if (this.maxPartitionCount > 0 && target > this.maxPartitionCount) {
target = this.maxPartitionCount;
}
if (this.minPartitionCount > 0 && target < this.minPartitionCount) {
target = this.minPartitionCount;
}
return target;
}
private void categorizeLeases(
List<Lease> allLeases,
Map<String, Lease> allPartitions,
List<Lease> expiredLeases,
Map<String, Integer> workerToPartitionCount) {
for (Lease lease : allLeases) {
allPartitions.put(lease.getLeaseToken(), lease);
if (lease.getOwner() == null || lease.getOwner().isEmpty() || this.isExpired(lease)) {
expiredLeases.add(lease);
} else {
String assignedTo = lease.getOwner();
Integer count = workerToPartitionCount.get(assignedTo);
if (count != null) {
workerToPartitionCount.replace(assignedTo, count + 1);
} else {
workerToPartitionCount.put(assignedTo, 1);
}
}
}
if (!workerToPartitionCount.containsKey(this.hostName)) {
workerToPartitionCount.put(this.hostName, 0);
}
}
private boolean isExpired(Lease lease) {
if (lease.getOwner() == null || lease.getOwner().isEmpty() || lease.getTimestamp() == null) {
return true;
}
Instant leaseExpireTime = Instant.parse(lease.getTimestamp()).plus(this.leaseExpirationInterval);
this.logger.debug("Current lease timestamp: {}, current time: {}", leaseExpireTime, Instant.now());
return leaseExpireTime.isBefore(Instant.now());
}
} |
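The review comment above argues for taking expired leases greedily when a per-host cap (maxScaleCount) is configured, and one at a time otherwise. The following is a deliberately simplified sketch of that idea, not the actual ChangeFeedProcessor logic (which also weighs the computed target and how many hosts are active); all names below are hypothetical.
static int expiredLeasesToTake(int maxScaleCount, int partitionsNeededForMe,
                               int activeHosts, int availableExpiredLeases) {
    if (partitionsNeededForMe <= 0) {
        return 0; // this host already owns its fair share
    }
    // With no cap and other hosts present, grab one lease per round so the load can spread;
    // with a cap in place it is safe to take everything still needed, up to what is available.
    boolean oneAtATime = maxScaleCount == 0 && activeHosts > 1;
    int take = oneAtATime ? 1 : partitionsNeededForMe;
    return Math.min(take, availableExpiredLeases);
}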
ah, yes, I have seen it. I should have changed it in the test instead of in the provider class. Now (I think) it's ok. | public Iterable<ConfigSource> getConfigSources(ClassLoader forClassLoader) {
List<ConfigSource> result = new ArrayList<>();
try {
Response response = springCloudConfigClientGateway.exchange(applicationName, activeProfile).await()
.atMost(springCloudConfigClientConfig.readTimeout);
if (response != null) {
final List<Response.PropertySource> propertySources = response.getPropertySources();
Collections.reverse(propertySources);
for (int i = 0; i < propertySources.size(); i++) {
final Response.PropertySource propertySource = propertySources.get(i);
result.add(new InMemoryConfigSource(450 + i, propertySource.getName(),
propertySource.getSource()));
}
}
} catch (Exception e) {
final String errorMessage = "Unable to obtain configuration from Spring Cloud Config Server at "
+ springCloudConfigClientConfig.url;
if (springCloudConfigClientConfig.failFast) {
throw new RuntimeException(errorMessage, e);
} else {
log.error(errorMessage, e);
return Collections.emptyList();
}
} finally {
springCloudConfigClientGateway.close();
}
return result;
} | .atMost(springCloudConfigClientConfig.readTimeout); | public Iterable<ConfigSource> getConfigSources(ClassLoader forClassLoader) {
List<ConfigSource> result = new ArrayList<>();
try {
boolean connectionTimeoutIsGreaterThanZero = !springCloudConfigClientConfig.connectionTimeout.isNegative()
&& !springCloudConfigClientConfig.connectionTimeout.isZero();
boolean readTimeoutIsGreaterThanZero = !springCloudConfigClientConfig.readTimeout.isNegative()
&& !springCloudConfigClientConfig.readTimeout.isZero();
Response response;
if (connectionTimeoutIsGreaterThanZero || readTimeoutIsGreaterThanZero)
response = springCloudConfigClientGateway.exchange(applicationName, activeProfile).await()
.atMost(springCloudConfigClientConfig.connectionTimeout
.plus(springCloudConfigClientConfig.readTimeout.multipliedBy(2)));
else {
response = springCloudConfigClientGateway.exchange(applicationName, activeProfile).await().indefinitely();
}
if (response != null) {
final List<Response.PropertySource> propertySources = response.getPropertySources();
Collections.reverse(propertySources);
for (int i = 0; i < propertySources.size(); i++) {
final Response.PropertySource propertySource = propertySources.get(i);
result.add(new InMemoryConfigSource(450 + i, propertySource.getName(),
propertySource.getSource()));
}
}
} catch (Exception e) {
final String errorMessage = "Unable to obtain configuration from Spring Cloud Config Server at "
+ springCloudConfigClientConfig.url;
if (springCloudConfigClientConfig.failFast) {
throw new RuntimeException(errorMessage, e);
} else {
log.error(errorMessage, e);
return Collections.emptyList();
}
} finally {
springCloudConfigClientGateway.close();
}
return result;
} | class SpringCloudConfigServerClientConfigSourceProvider implements ConfigSourceProvider {
private static final Logger log = Logger.getLogger(SpringCloudConfigServerClientConfigSourceProvider.class);
private final SpringCloudConfigClientConfig springCloudConfigClientConfig;
private final String applicationName;
private final String activeProfile;
private final SpringCloudConfigClientGateway springCloudConfigClientGateway;
public SpringCloudConfigServerClientConfigSourceProvider(SpringCloudConfigClientConfig springCloudConfigClientConfig,
String applicationName,
String activeProfile) {
this.springCloudConfigClientConfig = springCloudConfigClientConfig;
this.applicationName = applicationName;
this.activeProfile = activeProfile;
springCloudConfigClientGateway = new VertxSpringCloudConfigGateway(springCloudConfigClientConfig);
}
@Override
private static final class InMemoryConfigSource implements ConfigSource {
private final Map<String, String> values = new HashMap<>();
private final int ordinal;
private final String name;
private InMemoryConfigSource(int ordinal, String name, Map<String, String> source) {
this.ordinal = ordinal;
this.name = name;
this.values.putAll(source);
}
@Override
public Map<String, String> getProperties() {
return values;
}
@Override
public Set<String> getPropertyNames() {
return values.keySet();
}
@Override
public int getOrdinal() {
return ordinal;
}
@Override
public String getValue(String propertyName) {
return values.get(propertyName);
}
@Override
public String getName() {
return name;
}
}
} | class SpringCloudConfigServerClientConfigSourceProvider implements ConfigSourceProvider {
private static final Logger log = Logger.getLogger(SpringCloudConfigServerClientConfigSourceProvider.class);
private final SpringCloudConfigClientConfig springCloudConfigClientConfig;
private final String applicationName;
private final String activeProfile;
private final SpringCloudConfigClientGateway springCloudConfigClientGateway;
public SpringCloudConfigServerClientConfigSourceProvider(SpringCloudConfigClientConfig springCloudConfigClientConfig,
String applicationName,
String activeProfile) {
this.springCloudConfigClientConfig = springCloudConfigClientConfig;
this.applicationName = applicationName;
this.activeProfile = activeProfile;
springCloudConfigClientGateway = new VertxSpringCloudConfigGateway(springCloudConfigClientConfig);
}
@Override
private static final class InMemoryConfigSource implements ConfigSource {
private final Map<String, String> values = new HashMap<>();
private final int ordinal;
private final String name;
private InMemoryConfigSource(int ordinal, String name, Map<String, String> source) {
this.ordinal = ordinal;
this.name = name;
this.values.putAll(source);
}
@Override
public Map<String, String> getProperties() {
return values;
}
@Override
public Set<String> getPropertyNames() {
return values.keySet();
}
@Override
public int getOrdinal() {
return ordinal;
}
@Override
public String getValue(String propertyName) {
return values.get(propertyName);
}
@Override
public String getName() {
return name;
}
}
} |
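For reference, the await bound used in the revised getConfigSources above can be read as a small pure function. This is only a sketch assuming java.time.Duration values from the client config, with the indefinite wait signalled by an empty Optional; the helper name is hypothetical.
static java.util.Optional<java.time.Duration> awaitBound(java.time.Duration connectionTimeout,
                                                         java.time.Duration readTimeout) {
    boolean hasConnectionTimeout = !connectionTimeout.isNegative() && !connectionTimeout.isZero();
    boolean hasReadTimeout = !readTimeout.isNegative() && !readTimeout.isZero();
    if (!hasConnectionTimeout && !hasReadTimeout) {
        return java.util.Optional.empty(); // neither timeout configured: wait indefinitely
    }
    // Mirrors the bound above: the connection timeout plus twice the read timeout.
    return java.util.Optional.of(connectionTimeout.plus(readTimeout.multipliedBy(2)));
}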
@galderz For a test framework, I think it could do better at letting you tweak the headers manually. The topic is not new to the RestAssured GitHub though. I will make a TODO to follow up. | public static void runTest(String endpoint, String acceptEncoding, String contentEncoding, String contentLength) {
LOG.infof("Endpoint %s; Accept-Encoding: %s; Content-Encoding: %s; Content-Length: %s",
endpoint, acceptEncoding, contentEncoding, contentLength);
final WebClient client = WebClient.create(Vertx.vertx(), new WebClientOptions()
.setLogActivity(true)
.setFollowRedirects(true)
.setDecompressionSupported(false));
final CompletableFuture<HttpResponse<Buffer>> future = new CompletableFuture<>();
client.requestAbs(HttpMethod.GET, endpoint)
.putHeader(HttpHeaders.ACCEPT_ENCODING.toString(), acceptEncoding)
.putHeader(HttpHeaders.ACCEPT.toString(), "*/*")
.putHeader(HttpHeaders.USER_AGENT.toString(), "Tester")
.send(ar -> {
if (ar.succeeded()) {
future.complete(ar.result());
} else {
future.completeExceptionally(ar.cause());
}
});
try {
final HttpResponse<Buffer> response = future.get();
final String actualEncoding = response.headers().get("content-encoding");
assertEquals(OK.code(), response.statusCode(),
"Http status must be OK.");
assertEquals(contentEncoding, actualEncoding,
"Unexpected compressor selected.");
final int receivedLength = parseInt(response.headers().get("content-length"));
final int expectedLength = parseInt(contentLength);
assertTrue(receivedLength <= expectedLength,
"Compression apparently failed: receivedLength: " + receivedLength + ", expectedLength: " + expectedLength);
final String body;
if (actualEncoding != null && !"identity".equalsIgnoreCase(actualEncoding)) {
EmbeddedChannel channel = null;
if ("gzip".equalsIgnoreCase(actualEncoding)) {
channel = new EmbeddedChannel(newZlibDecoder(ZlibWrapper.GZIP));
} else if ("deflate".equalsIgnoreCase(actualEncoding)) {
channel = new EmbeddedChannel(newZlibDecoder(ZlibWrapper.ZLIB));
} else if ("br".equalsIgnoreCase(actualEncoding)) {
channel = new EmbeddedChannel(new BrotliDecoder());
} else {
fail("Unexpected compression used by server: " + actualEncoding);
}
channel.writeInbound(Unpooled.copiedBuffer(response.body().getBytes()));
channel.finish();
final ByteBuf decompressed = channel.readInbound();
body = decompressed.readCharSequence(decompressed.readableBytes(), StandardCharsets.UTF_8).toString();
} else {
body = response.body().toString(StandardCharsets.UTF_8);
}
assertEquals(TEXT, body,
"Unexpected body text.");
} catch (InterruptedException | ExecutionException e) {
fail(e);
}
} | .setFollowRedirects(true) | public static void runTest(String endpoint, String acceptEncoding, String contentEncoding, String contentLength) {
LOG.infof("Endpoint %s; Accept-Encoding: %s; Content-Encoding: %s; Content-Length: %s",
endpoint, acceptEncoding, contentEncoding, contentLength);
final WebClient client = WebClient.create(Vertx.vertx(), new WebClientOptions()
.setLogActivity(true)
.setFollowRedirects(true)
.setDecompressionSupported(false));
final CompletableFuture<HttpResponse<Buffer>> future = new CompletableFuture<>();
client.requestAbs(HttpMethod.GET, endpoint)
.putHeader(HttpHeaders.ACCEPT_ENCODING.toString(), acceptEncoding)
.putHeader(HttpHeaders.ACCEPT.toString(), "*/*")
.putHeader(HttpHeaders.USER_AGENT.toString(), "Tester")
.send(ar -> {
if (ar.succeeded()) {
future.complete(ar.result());
} else {
future.completeExceptionally(ar.cause());
}
});
try {
final HttpResponse<Buffer> response = future.get();
final String actualEncoding = response.headers().get("content-encoding");
assertEquals(OK.code(), response.statusCode(),
"Http status must be OK.");
assertEquals(contentEncoding, actualEncoding,
"Unexpected compressor selected.");
final int receivedLength = parseInt(response.headers().get("content-length"));
final int expectedLength = parseInt(contentLength);
if (contentEncoding == null) {
assertEquals(expectedLength, receivedLength,
"No compression was expected, so the content-length must match exactly.");
} else {
final int expectedLengthWithTolerance = expectedLength + (expectedLength / 100 * COMPRESSION_TOLERANCE_PERCENT);
assertTrue(receivedLength <= expectedLengthWithTolerance,
"Compression apparently failed: receivedLength: " + receivedLength +
" was supposed to be less or equal to expectedLength: " +
expectedLength + " plus " + COMPRESSION_TOLERANCE_PERCENT + "% tolerance, i.e. "
+ expectedLengthWithTolerance + ".");
}
final String body;
if (actualEncoding != null && !"identity".equalsIgnoreCase(actualEncoding)) {
EmbeddedChannel channel = null;
if ("gzip".equalsIgnoreCase(actualEncoding)) {
channel = new EmbeddedChannel(newZlibDecoder(ZlibWrapper.GZIP));
} else if ("deflate".equalsIgnoreCase(actualEncoding)) {
channel = new EmbeddedChannel(newZlibDecoder(ZlibWrapper.ZLIB));
} else if ("br".equalsIgnoreCase(actualEncoding)) {
channel = new EmbeddedChannel(new BrotliDecoder());
} else {
fail("Unexpected compression used by server: " + actualEncoding);
}
channel.writeInbound(Unpooled.copiedBuffer(response.body().getBytes()));
channel.finish();
final ByteBuf decompressed = channel.readInbound();
body = decompressed.readCharSequence(decompressed.readableBytes(), StandardCharsets.UTF_8).toString();
} else {
body = response.body().toString(StandardCharsets.UTF_8);
}
assertEquals(TEXT, body,
"Unexpected body text.");
} catch (InterruptedException | ExecutionException e) {
fail(e);
}
} | class Testflow {
/**
* This test logic is shared by both "all" module and "some" module.
* See their RESTEndpointsTest classes.
*
* @param endpoint
* @param acceptEncoding
* @param contentEncoding
* @param contentLength
*/
} | class Testflow {
public static final int COMPRESSION_TOLERANCE_PERCENT = 2;
/**
* This test logic is shared by both "all" module and "some" module.
* See their RESTEndpointsTest classes.
*
* @param endpoint
* @param acceptEncoding
* @param contentEncoding
* @param contentLength
*/
} |
what about other nested types, map, struct, json? | public ScalarOperator visitCall(CallOperator call, ScalarOperatorRewriteContext context) {
Function fn = call.getFunction();
if (fn == null) {
for (int i = 0; i < call.getChildren().size(); ++i) {
Type type = call.getType();
if (!type.matchesType(call.getChild(i).getType())) {
addCastChild(type, call, i);
}
}
} else {
if (fn.functionName().equals(FunctionSet.ARRAY_MAP) ||
fn.functionName().equals(FunctionSet.EXCHANGE_BYTES) ||
fn.functionName().equals(FunctionSet.EXCHANGE_SPEED)) {
return call;
}
if (!call.isAggregate() || FunctionSet.AVG.equalsIgnoreCase(fn.functionName())) {
Preconditions.checkArgument(Arrays.stream(fn.getArgs()).noneMatch(Type::isWildcardDecimal),
String.format("Resolved function %s has wildcard decimal as argument type", fn.functionName()));
}
boolean needAdjustScale = ArithmeticExpr.DECIMAL_SCALE_ADJUST_OPERATOR_SET
.contains(fn.getFunctionName().getFunction());
for (int i = 0; i < fn.getNumArgs(); i++) {
Type type = fn.getArgs()[i];
ScalarOperator child = call.getChild(i);
if (type.isArrayType() && child.getType().isArrayType()
&& ((ArrayType) child.getType()).getItemType().isNull()) {
addCastChild(type, call, i);
continue;
}
if ((needAdjustScale && type.isDecimalOfAnyVersion() && !type.equals(child.getType())) ||
!type.matchesType(child.getType())) {
addCastChild(type, call, i);
}
}
if (fn.hasVarArgs() && call.getChildren().size() > fn.getNumArgs()) {
Type type = fn.getVarArgsType();
for (int i = fn.getNumArgs(); i < call.getChildren().size(); i++) {
ScalarOperator child = call.getChild(i);
if (!type.matchesType(child.getType())) {
addCastChild(type, call, i);
}
}
}
}
return call;
} | } | public ScalarOperator visitCall(CallOperator call, ScalarOperatorRewriteContext context) {
Function fn = call.getFunction();
if (fn == null) {
for (int i = 0; i < call.getChildren().size(); ++i) {
Type type = call.getType();
if (!type.matchesType(call.getChild(i).getType())) {
addCastChild(type, call, i);
}
}
} else {
if (fn.functionName().equals(FunctionSet.ARRAY_MAP) ||
fn.functionName().equals(FunctionSet.EXCHANGE_BYTES) ||
fn.functionName().equals(FunctionSet.EXCHANGE_SPEED)) {
return call;
}
if (!call.isAggregate() || FunctionSet.AVG.equalsIgnoreCase(fn.functionName())) {
Preconditions.checkArgument(Arrays.stream(fn.getArgs()).noneMatch(Type::isWildcardDecimal),
String.format("Resolved function %s has wildcard decimal as argument type", fn.functionName()));
}
boolean needAdjustScale = ArithmeticExpr.DECIMAL_SCALE_ADJUST_OPERATOR_SET
.contains(fn.getFunctionName().getFunction());
for (int i = 0; i < fn.getNumArgs(); i++) {
Type type = fn.getArgs()[i];
ScalarOperator child = call.getChild(i);
if (needAdjustScale && type.isDecimalOfAnyVersion() && !type.equals(child.getType())) {
addCastChild(type, call, i);
continue;
}
if (!type.matchesType(child.getType())) {
addCastChild(type, call, i);
}
}
if (fn.hasVarArgs() && call.getChildren().size() > fn.getNumArgs()) {
Type type = fn.getVarArgsType();
for (int i = fn.getNumArgs(); i < call.getChildren().size(); i++) {
ScalarOperator child = call.getChild(i);
if (!type.matchesType(child.getType())) {
addCastChild(type, call, i);
}
}
}
}
return call;
} | class ImplicitCastRule extends TopDownScalarOperatorRewriteRule {
@Override
@Override
public ScalarOperator visitBetweenPredicate(BetweenPredicateOperator predicate,
ScalarOperatorRewriteContext context) {
return castForBetweenAndIn(predicate);
}
@Override
public ScalarOperator visitMap(MapOperator map, ScalarOperatorRewriteContext context) {
MapType mapType = (MapType) map.getType();
Type[] kvType = {mapType.getKeyType(), mapType.getValueType()};
for (int i = 0; i < map.getChildren().size(); i++) {
if (!map.getChildren().get(i).getType().matchesType(kvType[i % 2])) {
addCastChild(kvType[i % 2], map, i);
}
}
return map;
}
@Override
public ScalarOperator visitBinaryPredicate(BinaryPredicateOperator predicate,
ScalarOperatorRewriteContext context) {
ScalarOperator leftChild = predicate.getChild(0);
ScalarOperator rightChild = predicate.getChild(1);
Type type1 = leftChild.getType();
Type type2 = rightChild.getType();
if (predicate.getBinaryType() == BinaryType.EQ_FOR_NULL &&
(leftChild.isConstantNull() || rightChild.isConstantNull())) {
if (leftChild.isConstantNull()) {
predicate.setChild(0, ConstantOperator.createNull(type2));
}
if (rightChild.isConstantNull()) {
predicate.setChild(1, ConstantOperator.createNull(type1));
}
return predicate;
}
if (type1.matchesType(type2)) {
return predicate;
}
if (rightChild.isVariable() && leftChild.isConstantRef()) {
Optional<ScalarOperator> op = Utils.tryCastConstant(leftChild, type2);
if (op.isPresent()) {
predicate.getChildren().set(0, op.get());
return predicate;
} else if (rightChild.getType().isDateType() && Type.canCastTo(leftChild.getType(), rightChild.getType())) {
addCastChild(rightChild.getType(), predicate, 0);
return predicate;
}
} else if (leftChild.isVariable() && rightChild.isConstantRef()) {
Optional<ScalarOperator> op = Utils.tryCastConstant(rightChild, type1);
if (op.isPresent()) {
predicate.getChildren().set(1, op.get());
return predicate;
} else if (leftChild.getType().isDateType() && Type.canCastTo(rightChild.getType(), leftChild.getType())) {
addCastChild(leftChild.getType(), predicate, 1);
return predicate;
}
}
Type compatibleType = TypeManager.getCompatibleTypeForBinary(predicate.getBinaryType(), type1, type2);
if (!type1.matchesType(compatibleType)) {
addCastChild(compatibleType, predicate, 0);
}
if (!type2.matchesType(compatibleType)) {
addCastChild(compatibleType, predicate, 1);
}
return predicate;
}
@Override
public ScalarOperator visitCompoundPredicate(CompoundPredicateOperator predicate,
ScalarOperatorRewriteContext context) {
for (int i = 0; i < predicate.getChildren().size(); i++) {
ScalarOperator child = predicate.getChild(i);
if (!Type.BOOLEAN.matchesType(child.getType())) {
addCastChild(Type.BOOLEAN, predicate, i);
}
}
return predicate;
}
@Override
public ScalarOperator visitInPredicate(InPredicateOperator predicate, ScalarOperatorRewriteContext context) {
return castForBetweenAndIn(predicate);
}
@Override
public ScalarOperator visitMultiInPredicate(MultiInPredicateOperator predicate, ScalarOperatorRewriteContext c) {
throw new StarRocksPlannerException("Implicit casting of multi-column IN predicate is not supported.",
ErrorType.INTERNAL_ERROR);
}
@Override
public ScalarOperator visitLikePredicateOperator(LikePredicateOperator predicate,
ScalarOperatorRewriteContext context) {
Type type1 = predicate.getChild(0).getType();
Type type2 = predicate.getChild(1).getType();
if (!type1.isStringType()) {
addCastChild(Type.VARCHAR, predicate, 0);
}
if (!type2.isStringType()) {
addCastChild(Type.VARCHAR, predicate, 1);
}
return predicate;
}
@Override
public ScalarOperator visitCaseWhenOperator(CaseWhenOperator operator, ScalarOperatorRewriteContext context) {
if (operator.hasElse() && !operator.getType().matchesType(operator.getElseClause().getType())) {
operator.setElseClause(new CastOperator(operator.getType(), operator.getElseClause()));
}
for (int i = 0; i < operator.getWhenClauseSize(); i++) {
if (!operator.getType().matchesType(operator.getThenClause(i).getType())) {
operator.setThenClause(i, new CastOperator(operator.getType(), operator.getThenClause(i)));
}
}
Type compatibleType = Type.BOOLEAN;
if (operator.hasCase()) {
List<Type> whenTypes = Lists.newArrayList();
whenTypes.add(operator.getCaseClause().getType());
for (int i = 0; i < operator.getWhenClauseSize(); i++) {
whenTypes.add(operator.getWhenClause(i).getType());
}
compatibleType = TypeManager.getCompatibleTypeForCaseWhen(whenTypes);
if (!compatibleType.matchesType(operator.getCaseClause().getType())) {
operator.setCaseClause(new CastOperator(compatibleType, operator.getCaseClause()));
}
}
for (int i = 0; i < operator.getWhenClauseSize(); i++) {
if (!compatibleType.matchesType(operator.getWhenClause(i).getType())) {
operator.setWhenClause(i, new CastOperator(compatibleType, operator.getWhenClause(i)));
}
}
return operator;
}
private ScalarOperator castForBetweenAndIn(ScalarOperator predicate) {
Type firstType = predicate.getChildren().get(0).getType();
if (predicate.getChildren().stream().skip(1).allMatch(o -> firstType.matchesType(o.getType()))) {
return predicate;
}
List<Type> types = predicate.getChildren().stream().map(ScalarOperator::getType).collect(Collectors.toList());
if (predicate.getChild(0).isVariable() && predicate.getChildren().stream().skip(1)
.allMatch(ScalarOperator::isConstantRef)) {
List<ScalarOperator> newChild = Lists.newArrayList();
newChild.add(predicate.getChild(0));
for (int i = 1; i < types.size(); i++) {
Optional<ScalarOperator> op = Utils.tryCastConstant(predicate.getChild(i), firstType);
op.ifPresent(newChild::add);
}
if (newChild.size() == predicate.getChildren().size()) {
predicate.getChildren().clear();
predicate.getChildren().addAll(newChild);
return predicate;
}
}
Type compatibleType = TypeManager.getCompatibleTypeForBetweenAndIn(types);
for (int i = 0; i < predicate.getChildren().size(); i++) {
Type childType = predicate.getChild(i).getType();
if (!childType.matchesType(compatibleType)) {
addCastChild(compatibleType, predicate, i);
}
}
return predicate;
}
private void addCastChild(Type returnType, ScalarOperator node, int index) {
node.getChildren().set(index, new CastOperator(returnType, node.getChild(index), true));
}
} | class ImplicitCastRule extends TopDownScalarOperatorRewriteRule {
@Override
@Override
public ScalarOperator visitBetweenPredicate(BetweenPredicateOperator predicate,
ScalarOperatorRewriteContext context) {
return castForBetweenAndIn(predicate);
}
@Override
public ScalarOperator visitMap(MapOperator map, ScalarOperatorRewriteContext context) {
MapType mapType = (MapType) map.getType();
Type[] kvType = {mapType.getKeyType(), mapType.getValueType()};
for (int i = 0; i < map.getChildren().size(); i++) {
if (!map.getChildren().get(i).getType().matchesType(kvType[i % 2])) {
addCastChild(kvType[i % 2], map, i);
}
}
return map;
}
@Override
public ScalarOperator visitBinaryPredicate(BinaryPredicateOperator predicate,
ScalarOperatorRewriteContext context) {
ScalarOperator leftChild = predicate.getChild(0);
ScalarOperator rightChild = predicate.getChild(1);
Type type1 = leftChild.getType();
Type type2 = rightChild.getType();
if (predicate.getBinaryType() == BinaryType.EQ_FOR_NULL &&
(leftChild.isConstantNull() || rightChild.isConstantNull())) {
if (leftChild.isConstantNull()) {
predicate.setChild(0, ConstantOperator.createNull(type2));
}
if (rightChild.isConstantNull()) {
predicate.setChild(1, ConstantOperator.createNull(type1));
}
return predicate;
}
if (type1.matchesType(type2)) {
return predicate;
}
if (rightChild.isVariable() && leftChild.isConstantRef()) {
Optional<ScalarOperator> op = Utils.tryCastConstant(leftChild, type2);
if (op.isPresent()) {
predicate.getChildren().set(0, op.get());
return predicate;
} else if (rightChild.getType().isDateType() && Type.canCastTo(leftChild.getType(), rightChild.getType())) {
addCastChild(rightChild.getType(), predicate, 0);
return predicate;
}
} else if (leftChild.isVariable() && rightChild.isConstantRef()) {
Optional<ScalarOperator> op = Utils.tryCastConstant(rightChild, type1);
if (op.isPresent()) {
predicate.getChildren().set(1, op.get());
return predicate;
} else if (leftChild.getType().isDateType() && Type.canCastTo(rightChild.getType(), leftChild.getType())) {
addCastChild(leftChild.getType(), predicate, 1);
return predicate;
}
}
Type compatibleType = TypeManager.getCompatibleTypeForBinary(predicate.getBinaryType(), type1, type2);
if (!type1.matchesType(compatibleType)) {
addCastChild(compatibleType, predicate, 0);
}
if (!type2.matchesType(compatibleType)) {
addCastChild(compatibleType, predicate, 1);
}
return predicate;
}
@Override
public ScalarOperator visitCompoundPredicate(CompoundPredicateOperator predicate,
ScalarOperatorRewriteContext context) {
for (int i = 0; i < predicate.getChildren().size(); i++) {
ScalarOperator child = predicate.getChild(i);
if (!Type.BOOLEAN.matchesType(child.getType())) {
addCastChild(Type.BOOLEAN, predicate, i);
}
}
return predicate;
}
@Override
public ScalarOperator visitInPredicate(InPredicateOperator predicate, ScalarOperatorRewriteContext context) {
return castForBetweenAndIn(predicate);
}
@Override
public ScalarOperator visitMultiInPredicate(MultiInPredicateOperator predicate, ScalarOperatorRewriteContext c) {
throw new StarRocksPlannerException("Implicit casting of multi-column IN predicate is not supported.",
ErrorType.INTERNAL_ERROR);
}
@Override
public ScalarOperator visitLikePredicateOperator(LikePredicateOperator predicate,
ScalarOperatorRewriteContext context) {
Type type1 = predicate.getChild(0).getType();
Type type2 = predicate.getChild(1).getType();
if (!type1.isStringType()) {
addCastChild(Type.VARCHAR, predicate, 0);
}
if (!type2.isStringType()) {
addCastChild(Type.VARCHAR, predicate, 1);
}
return predicate;
}
@Override
public ScalarOperator visitCaseWhenOperator(CaseWhenOperator operator, ScalarOperatorRewriteContext context) {
if (operator.hasElse() && !operator.getType().matchesType(operator.getElseClause().getType())) {
operator.setElseClause(new CastOperator(operator.getType(), operator.getElseClause()));
}
for (int i = 0; i < operator.getWhenClauseSize(); i++) {
if (!operator.getType().matchesType(operator.getThenClause(i).getType())) {
operator.setThenClause(i, new CastOperator(operator.getType(), operator.getThenClause(i)));
}
}
Type compatibleType = Type.BOOLEAN;
if (operator.hasCase()) {
List<Type> whenTypes = Lists.newArrayList();
whenTypes.add(operator.getCaseClause().getType());
for (int i = 0; i < operator.getWhenClauseSize(); i++) {
whenTypes.add(operator.getWhenClause(i).getType());
}
compatibleType = TypeManager.getCompatibleTypeForCaseWhen(whenTypes);
if (!compatibleType.matchesType(operator.getCaseClause().getType())) {
operator.setCaseClause(new CastOperator(compatibleType, operator.getCaseClause()));
}
}
for (int i = 0; i < operator.getWhenClauseSize(); i++) {
if (!compatibleType.matchesType(operator.getWhenClause(i).getType())) {
operator.setWhenClause(i, new CastOperator(compatibleType, operator.getWhenClause(i)));
}
}
return operator;
}
private ScalarOperator castForBetweenAndIn(ScalarOperator predicate) {
Type firstType = predicate.getChildren().get(0).getType();
if (predicate.getChildren().stream().skip(1).allMatch(o -> firstType.matchesType(o.getType()))) {
return predicate;
}
List<Type> types = predicate.getChildren().stream().map(ScalarOperator::getType).collect(Collectors.toList());
if (predicate.getChild(0).isVariable() && predicate.getChildren().stream().skip(1)
.allMatch(ScalarOperator::isConstantRef)) {
List<ScalarOperator> newChild = Lists.newArrayList();
newChild.add(predicate.getChild(0));
for (int i = 1; i < types.size(); i++) {
Optional<ScalarOperator> op = Utils.tryCastConstant(predicate.getChild(i), firstType);
op.ifPresent(newChild::add);
}
if (newChild.size() == predicate.getChildren().size()) {
predicate.getChildren().clear();
predicate.getChildren().addAll(newChild);
return predicate;
}
}
Type compatibleType = TypeManager.getCompatibleTypeForBetweenAndIn(types);
for (int i = 0; i < predicate.getChildren().size(); i++) {
Type childType = predicate.getChild(i).getType();
if (!childType.matchesType(compatibleType)) {
addCastChild(compatibleType, predicate, i);
}
}
return predicate;
}
private void addCastChild(Type returnType, ScalarOperator node, int index) {
node.getChildren().set(index, new CastOperator(returnType, node.getChild(index), true));
}
} |
I mean the last line. | public void testQuiesceOfMailboxRightBeforeSubmittingActionViaTimerService() throws Exception {
AtomicBoolean submitThroughputFail = new AtomicBoolean();
MockEnvironment mockEnvironment = new MockEnvironmentBuilder().build();
final UnAvailableTestInputProcessor inputProcessor = new UnAvailableTestInputProcessor();
RunningTask<StreamTask<?, ?>> task =
runTask(
() ->
new MockStreamTaskBuilder(mockEnvironment)
.setHandleAsyncException(
(str, t) -> submitThroughputFail.set(true))
.setStreamInputProcessor(inputProcessor)
.build());
waitTaskIsRunning(task.streamTask, task.invocationFuture);
TimerService timerService = task.streamTask.systemTimerService;
MailboxExecutor mainMailboxExecutor =
task.streamTask.mailboxProcessor.getMainMailboxExecutor();
CountDownLatch stoppintMailboxLatch = new CountDownLatch(1);
timerService.registerTimer(
timerService.getCurrentProcessingTime(),
(time) -> {
stoppintMailboxLatch.await();
Thread.sleep(5);
mainMailboxExecutor.submit(() -> {}, "test");
});
mainMailboxExecutor
.submit(
() -> {
stoppintMailboxLatch.countDown();
task.streamTask.afterInvoke();
},
"test")
.get();
assertFalse(submitThroughputFail.get());
inputProcessor.availabilityProvider.getUnavailableToResetAvailable().complete(null);
} | inputProcessor.availabilityProvider.getUnavailableToResetAvailable().complete(null); | public void testQuiesceOfMailboxRightBeforeSubmittingActionViaTimerService() throws Exception {
AtomicBoolean submitThroughputFail = new AtomicBoolean();
MockEnvironment mockEnvironment = new MockEnvironmentBuilder().build();
final UnAvailableTestInputProcessor inputProcessor = new UnAvailableTestInputProcessor();
RunningTask<StreamTask<?, ?>> task =
runTask(
() ->
new MockStreamTaskBuilder(mockEnvironment)
.setHandleAsyncException(
(str, t) -> submitThroughputFail.set(true))
.setStreamInputProcessor(inputProcessor)
.build());
waitTaskIsRunning(task.streamTask, task.invocationFuture);
TimerService timerService = task.streamTask.systemTimerService;
MailboxExecutor mainMailboxExecutor =
task.streamTask.mailboxProcessor.getMainMailboxExecutor();
CountDownLatch stoppingMailboxLatch = new CountDownLatch(1);
timerService.registerTimer(
timerService.getCurrentProcessingTime(),
(time) -> {
stoppingMailboxLatch.await();
Thread.sleep(5);
mainMailboxExecutor.submit(() -> {}, "test");
});
mainMailboxExecutor
.submit(
() -> {
stoppingMailboxLatch.countDown();
task.streamTask.afterInvoke();
},
"test")
.get();
assertFalse(submitThroughputFail.get());
inputProcessor.availabilityProvider.getUnavailableToResetAvailable().complete(null);
} | class WaitingThread extends Thread {
private final MailboxExecutor executor;
private final RunnableWithException resumeTask;
private final long sleepTimeInsideMail;
private final long sleepTimeOutsideMail;
private final TimerGauge sleepOutsideMailTimer;
@Nullable private Exception asyncException;
public WaitingThread(
MailboxExecutor executor,
RunnableWithException resumeTask,
long sleepTimeInsideMail,
long sleepTimeOutsideMail,
TimerGauge sleepOutsideMailTimer) {
this.executor = executor;
this.resumeTask = resumeTask;
this.sleepTimeInsideMail = sleepTimeInsideMail;
this.sleepTimeOutsideMail = sleepTimeOutsideMail;
this.sleepOutsideMailTimer = sleepOutsideMailTimer;
}
@Override
public void run() {
try {
while (!sleepOutsideMailTimer.isMeasuring()) {
Thread.sleep(1);
}
Thread.sleep(sleepTimeOutsideMail);
} catch (InterruptedException e) {
asyncException = e;
}
executor.submit(
() -> {
if (asyncException != null) {
throw asyncException;
}
Thread.sleep(sleepTimeInsideMail);
resumeTask.run();
},
"This task will complete the future to resume process input action.");
}
} | class WaitingThread extends Thread {
private final MailboxExecutor executor;
private final RunnableWithException resumeTask;
private final long sleepTimeInsideMail;
private final long sleepTimeOutsideMail;
private final TimerGauge sleepOutsideMailTimer;
@Nullable private Exception asyncException;
public WaitingThread(
MailboxExecutor executor,
RunnableWithException resumeTask,
long sleepTimeInsideMail,
long sleepTimeOutsideMail,
TimerGauge sleepOutsideMailTimer) {
this.executor = executor;
this.resumeTask = resumeTask;
this.sleepTimeInsideMail = sleepTimeInsideMail;
this.sleepTimeOutsideMail = sleepTimeOutsideMail;
this.sleepOutsideMailTimer = sleepOutsideMailTimer;
}
@Override
public void run() {
try {
while (!sleepOutsideMailTimer.isMeasuring()) {
Thread.sleep(1);
}
Thread.sleep(sleepTimeOutsideMail);
} catch (InterruptedException e) {
asyncException = e;
}
executor.submit(
() -> {
if (asyncException != null) {
throw asyncException;
}
Thread.sleep(sleepTimeInsideMail);
resumeTask.run();
},
"This task will complete the future to resume process input action.");
}
} |
Please use `assertThat` instead of `assertEquals`. | public void assertCRC32MatchMySQLSingleTableDataCalculatorSuccess() {
String algorithmType = new CRC32MatchMySQLSingleTableDataCalculator().getAlgorithmType();
Assert.assertEquals("CRC32_MATCH", algorithmType);
} | Assert.assertEquals("CRC32_MATCH", algorithmType); | public void assertCRC32MatchMySQLSingleTableDataCalculatorSuccess() {
String actualAlgorithmType = new CRC32MatchMySQLSingleTableDataCalculator().getAlgorithmType();
String expectedAlgorithmType = "CRC32_MATCH";
assertThat(actualAlgorithmType, is(expectedAlgorithmType));
} | class CRC32MatchMySQLSingleTableDataCalculatorTest {
@Mock
private DataCalculateParameter dataCalculateParameter;
private PipelineDataSourceWrapper pipelineJobPrepareFailedException;
@Before
public void setUp() throws SQLException {
pipelineJobPrepareFailedException = mock(PipelineDataSourceWrapper.class, RETURNS_DEEP_STUBS);
Collection<String> columnNames = new ArrayList<String>(Arrays.asList("fieldOne", "fieldTwo", "fieldThree"));
when(dataCalculateParameter.getLogicTableName()).thenReturn("tableName");
when(dataCalculateParameter.getColumnNames()).thenReturn(columnNames);
when(dataCalculateParameter.getDataSource()).thenReturn(pipelineJobPrepareFailedException);
}
@Test
public void assertGetDatabaseTypesSuccess() {
Collection<String> databaseTypes = new CRC32MatchMySQLSingleTableDataCalculator().getDatabaseTypes();
Assert.assertEquals(1, databaseTypes.size());
Assert.assertEquals("MySQL", databaseTypes.stream().findFirst().get());
}
@Test
public void assertCalculateSuccess() {
Iterable<Object> calculate = new CRC32MatchMySQLSingleTableDataCalculator().calculate(dataCalculateParameter);
long calculateSize = StreamSupport.stream(calculate.spliterator(), false).count();
Assert.assertEquals(3, calculateSize);
}
@Test(expected = PipelineDataConsistencyCheckFailedException.class)
public void assertCalculateFailed() throws SQLException {
Connection connection = mock(Connection.class, RETURNS_DEEP_STUBS);
when(pipelineJobPrepareFailedException.getConnection()).thenReturn(connection);
when(connection.prepareStatement(anyString())).thenThrow(new SQLException());
new CRC32MatchMySQLSingleTableDataCalculator().calculate(dataCalculateParameter);
}
} | class CRC32MatchMySQLSingleTableDataCalculatorTest {
@Mock
private DataCalculateParameter dataCalculateParameter;
private PipelineDataSourceWrapper pipelineDataSource;
private Connection connection;
@Mock
private PreparedStatement preparedStatement;
@Mock
private ResultSet resultSet;
@Before
public void setUp() throws SQLException {
pipelineDataSource = mock(PipelineDataSourceWrapper.class, RETURNS_DEEP_STUBS);
connection = mock(Connection.class, RETURNS_DEEP_STUBS);
Collection<String> columnNames = Arrays.asList("fieldOne", "fieldTwo", "fieldThree");
when(dataCalculateParameter.getLogicTableName()).thenReturn("tableName");
when(dataCalculateParameter.getColumnNames()).thenReturn(columnNames);
when(dataCalculateParameter.getDataSource()).thenReturn(pipelineDataSource);
}
@Test
public void assertGetDatabaseTypesSuccess() {
Collection<String> actualDatabaseTypes = new CRC32MatchMySQLSingleTableDataCalculator().getDatabaseTypes();
long actualDatabaseTypesSize = actualDatabaseTypes.size();
long expectedDatabaseTypesSize = new Long(1);
String actualDatabaseTypesFirstElement = actualDatabaseTypes.stream().findFirst().get();
String expectedDatabaseTypesFirstElement = "MySQL";
assertThat(actualDatabaseTypesSize, is(expectedDatabaseTypesSize));
assertThat(actualDatabaseTypesFirstElement, is(expectedDatabaseTypesFirstElement));
}
@Test
public void assertCalculateSuccess() {
Iterable<Object> calculate = new CRC32MatchMySQLSingleTableDataCalculator().calculate(dataCalculateParameter);
long actualDatabaseTypesSize = StreamSupport.stream(calculate.spliterator(), false).count();
long expectedDatabaseTypesSize = dataCalculateParameter.getColumnNames().size();
assertThat(actualDatabaseTypesSize, is(expectedDatabaseTypesSize));
}
@Test
public void assertCalculateWithQuerySuccess() throws SQLException {
String sqlCommandForFieldOne = "SELECT BIT_XOR(CAST(CRC32(`fieldOne`) AS UNSIGNED)) AS checksum FROM `tableName`";
String sqlCommandForFieldTwo = "SELECT BIT_XOR(CAST(CRC32(`fieldTwo`) AS UNSIGNED)) AS checksum FROM `tableName`";
String sqlCommandForFieldThree = "SELECT BIT_XOR(CAST(CRC32(`fieldThree`) AS UNSIGNED)) AS checksum FROM `tableName`";
when(pipelineDataSource.getConnection()).thenReturn(connection);
when(connection.prepareStatement(sqlCommandForFieldOne)).thenReturn(preparedStatement);
when(connection.prepareStatement(sqlCommandForFieldTwo)).thenReturn(preparedStatement);
when(connection.prepareStatement(sqlCommandForFieldThree)).thenReturn(preparedStatement);
when(preparedStatement.executeQuery()).thenReturn(resultSet);
Iterable<Object> calculate = new CRC32MatchMySQLSingleTableDataCalculator().calculate(dataCalculateParameter);
long actualDatabaseTypesSize = StreamSupport.stream(calculate.spliterator(), false).count();
long expectedDatabaseTypesSize = dataCalculateParameter.getColumnNames().size();
assertThat(actualDatabaseTypesSize, is(expectedDatabaseTypesSize));
}
@Test(expected = PipelineDataConsistencyCheckFailedException.class)
public void assertCalculateFailed() throws SQLException {
when(pipelineDataSource.getConnection()).thenReturn(connection);
when(connection.prepareStatement(anyString())).thenThrow(new SQLException());
new CRC32MatchMySQLSingleTableDataCalculator().calculate(dataCalculateParameter);
}
} |
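The review above asks for Hamcrest's `assertThat` in place of JUnit's `assertEquals`, and the rewritten test follows that advice. A minimal, self-contained sketch of the same pattern, assuming JUnit 4 plus Hamcrest; the class name and value are illustrative and not taken from the code above:

```java
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;

import org.junit.Test;

public class AlgorithmTypeAssertionExampleTest {

    @Test
    public void assertAlgorithmType() {
        // assertEquals(expected, actual) puts the expected value first;
        // assertThat(actual, is(expected)) reads left to right and fails with
        // an "Expected: ... but: was ..." style message.
        String actual = "CRC32_MATCH";
        assertThat(actual, is("CRC32_MATCH"));
    }
}
```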
Yeah, here I can just do that. I'll update. | public Response<CommunicationRelayConfiguration> getRelayConfigurationWithResponse(CommunicationUserIdentifier communicationUser, Context context) {
context = context == null ? Context.NONE : context;
CommunicationRelayConfigurationRequest body = new CommunicationRelayConfigurationRequest();
body.setId(communicationUser.getId());
Response<CommunicationRelayConfiguration> response =
client.issueRelayConfigurationWithResponseAsync(body, context).block();
if (response == null || response.getValue() == null) {
throw logger.logExceptionAsError(new IllegalStateException("Service failed to return a response or expected value."));
}
return new SimpleResponse<CommunicationRelayConfiguration>(
response,
response.getValue());
} | return new SimpleResponse<CommunicationRelayConfiguration>( | public Response<CommunicationRelayConfiguration> getRelayConfigurationWithResponse(CommunicationUserIdentifier communicationUser, Context context) {
Response<CommunicationRelayConfiguration> response =
client.getRelayConfigurationWithResponse(communicationUser, context).block();
return response;
} | class CommunicationRelayClient {
private final CommunicationNetworkTraversalsImpl client;
private final ClientLogger logger = new ClientLogger(CommunicationRelayClient.class);
CommunicationRelayClient(CommunicationNetworkingClientImpl communicationNetworkingClient) {
client = communicationNetworkingClient.getCommunicationNetworkTraversals();
}
/**
* Creates a new CommunicationRelayConfiguration.
*
* @param communicationUser The CommunicationUserIdentifier for whom to issue a token
* @return The obtained Communication Relay Configuration
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public CommunicationRelayConfiguration getRelayConfiguration(CommunicationUserIdentifier communicationUser) {
CommunicationRelayConfigurationRequest body = new CommunicationRelayConfigurationRequest();
body.setId(communicationUser.getId());
return client.issueRelayConfiguration(body);
}
/**
* Creates a new CommunicationRelayConfiguration with response.
*
* @param communicationUser The CommunicationUserIdentifier for whom to issue a token
* @param context A {@link Context} representing the request context.
* @return The obtained Communication Relay Configuration
*/
} | class CommunicationRelayClient {
private final CommunicationRelayAsyncClient client;
private final ClientLogger logger = new ClientLogger(CommunicationRelayClient.class);
CommunicationRelayClient(CommunicationRelayAsyncClient communicationNetworkingClient) {
client = communicationNetworkingClient;
}
/**
* Creates a new CommunicationRelayConfiguration.
*
* @param communicationUser The CommunicationUserIdentifier for whom to issue a token
* @return The obtained Communication Relay Configuration
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public CommunicationRelayConfiguration getRelayConfiguration(CommunicationUserIdentifier communicationUser) {
return client.getRelayConfiguration(communicationUser).block();
}
/**
* Creates a new CommunicationRelayConfiguration with response.
*
* @param communicationUser The CommunicationUserIdentifier for whom to issue a token
* @param context A {@link Context} representing the request context.
* @return The obtained Communication Relay Configuration
*/
@ServiceMethod(returns = ReturnType.SINGLE)
} |
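In the rewritten `CommunicationRelayClient` above, the synchronous client now simply wraps `CommunicationRelayAsyncClient` and blocks on the async result. A rough sketch of that sync-over-async wrapper pattern, using made-up class names and Project Reactor's `Mono` rather than the real Azure types:

```java
import reactor.core.publisher.Mono;

// Hypothetical async client; it stands in for CommunicationRelayAsyncClient.
class ExampleAsyncClient {
    Mono<String> getConfiguration(String userId) {
        return Mono.just("relay-config-for-" + userId);
    }
}

// Synchronous facade: each method delegates to the async client and blocks,
// mirroring client.getRelayConfiguration(communicationUser).block() above.
class ExampleSyncClient {
    private final ExampleAsyncClient asyncClient;

    ExampleSyncClient(ExampleAsyncClient asyncClient) {
        this.asyncClient = asyncClient;
    }

    String getConfiguration(String userId) {
        return asyncClient.getConfiguration(userId).block();
    }
}
```

Keeping all request logic in the async client avoids duplicating it in two places, which appears to be the point of the change.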
Currently the raw ID is generated from the user ID and cloud environment only. Under some circumstances, the rawId from `CommunicationIdentifierModel` could be a completely different value (like `123abc`) that cannot be generated that way. I'm not sure what the expected behaviour is, but not setting the rawId explicitly will change the previous logic. | public static CommunicationIdentifier convert(CommunicationIdentifierModel identifier) {
if (identifier == null) {
return null;
}
assertSingleType(identifier);
String rawId = identifier.getRawId();
CommunicationIdentifierModelKind kind = identifier.getKind();
if (kind != null) {
if (kind == CommunicationIdentifierModelKind.COMMUNICATION_USER
&& identifier.getCommunicationUser() != null) {
Objects.requireNonNull(identifier.getCommunicationUser().getId());
return new CommunicationUserIdentifier(identifier.getCommunicationUser().getId());
}
if (kind == CommunicationIdentifierModelKind.PHONE_NUMBER
&& identifier.getPhoneNumber() != null) {
PhoneNumberIdentifierModel phoneNumberModel = identifier.getPhoneNumber();
Objects.requireNonNull(phoneNumberModel.getValue());
return new PhoneNumberIdentifier(phoneNumberModel.getValue()).setRawId(rawId);
}
if (kind == CommunicationIdentifierModelKind.MICROSOFT_TEAMS_USER
&& identifier.getMicrosoftTeamsUser() != null) {
MicrosoftTeamsUserIdentifierModel teamsUserIdentifierModel = identifier.getMicrosoftTeamsUser();
Objects.requireNonNull(teamsUserIdentifierModel.getUserId());
Objects.requireNonNull(teamsUserIdentifierModel.getCloud());
Objects.requireNonNull(rawId);
return new MicrosoftTeamsUserIdentifier(teamsUserIdentifierModel.getUserId(),
teamsUserIdentifierModel.isAnonymous())
.setRawId(rawId)
.setCloudEnvironment(CommunicationCloudEnvironment
.fromString(teamsUserIdentifierModel.getCloud().toString()));
}
Objects.requireNonNull(rawId);
return new UnknownIdentifier(rawId);
}
if (identifier.getCommunicationUser() != null) {
Objects.requireNonNull(identifier.getCommunicationUser().getId());
return new CommunicationUserIdentifier(identifier.getCommunicationUser().getId());
}
if (identifier.getPhoneNumber() != null) {
PhoneNumberIdentifierModel phoneNumberModel = identifier.getPhoneNumber();
Objects.requireNonNull(phoneNumberModel.getValue());
return new PhoneNumberIdentifier(phoneNumberModel.getValue()).setRawId(rawId);
}
if (identifier.getMicrosoftTeamsUser() != null) {
MicrosoftTeamsUserIdentifierModel teamsUserIdentifierModel = identifier.getMicrosoftTeamsUser();
Objects.requireNonNull(teamsUserIdentifierModel.getUserId());
Objects.requireNonNull(teamsUserIdentifierModel.getCloud());
Objects.requireNonNull(rawId);
return new MicrosoftTeamsUserIdentifier(teamsUserIdentifierModel.getUserId(),
teamsUserIdentifierModel.isAnonymous())
.setRawId(rawId)
.setCloudEnvironment(CommunicationCloudEnvironment
.fromString(teamsUserIdentifierModel.getCloud().toString()));
}
Objects.requireNonNull(rawId);
return new UnknownIdentifier(rawId);
} | .setRawId(rawId) | public static CommunicationIdentifier convert(CommunicationIdentifierModel identifier) {
if (identifier == null) {
return null;
}
assertSingleType(identifier);
String rawId = identifier.getRawId();
CommunicationIdentifierModelKind kind = (identifier.getKind() != null) ? identifier.getKind() : extractKind(identifier);
if (kind == CommunicationIdentifierModelKind.COMMUNICATION_USER
&& identifier.getCommunicationUser() != null) {
Objects.requireNonNull(identifier.getCommunicationUser().getId(),
"'ID' of the CommunicationIdentifierModel cannot be null.");
return new CommunicationUserIdentifier(identifier.getCommunicationUser().getId());
}
if (kind == CommunicationIdentifierModelKind.PHONE_NUMBER
&& identifier.getPhoneNumber() != null) {
String phoneNumber = identifier.getPhoneNumber().getValue();
Objects.requireNonNull(phoneNumber, "'PhoneNumber' of the CommunicationIdentifierModel cannot be null.");
Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null.");
return new PhoneNumberIdentifier(phoneNumber).setRawId(rawId);
}
if (kind == CommunicationIdentifierModelKind.MICROSOFT_TEAMS_USER
&& identifier.getMicrosoftTeamsUser() != null) {
MicrosoftTeamsUserIdentifierModel teamsUserIdentifierModel = identifier.getMicrosoftTeamsUser();
Objects.requireNonNull(teamsUserIdentifierModel.getUserId(), "'UserID' of the CommunicationIdentifierModel cannot be null.");
Objects.requireNonNull(teamsUserIdentifierModel.getCloud(), "'Cloud' of the CommunicationIdentifierModel cannot be null.");
Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null.");
return new MicrosoftTeamsUserIdentifier(teamsUserIdentifierModel.getUserId(),
teamsUserIdentifierModel.isAnonymous())
.setRawId(rawId)
.setCloudEnvironment(CommunicationCloudEnvironment
.fromString(teamsUserIdentifierModel.getCloud().toString()));
}
Objects.requireNonNull(rawId, "'RawID' of the CommunicationIdentifierModel cannot be null.");
return new UnknownIdentifier(rawId);
} | class CommunicationIdentifierConverter {
/**
* Maps from {@link CommunicationIdentifierModel} to {@link CommunicationIdentifier}.
*/
/**
* Maps from {@link CommunicationIdentifier} to {@link CommunicationIdentifierModel}.
*/
public static CommunicationIdentifierModel convert(CommunicationIdentifier identifier)
throws IllegalArgumentException {
if (identifier == null) {
return null;
}
if (identifier instanceof CommunicationUserIdentifier) {
CommunicationUserIdentifier communicationUserIdentifier = (CommunicationUserIdentifier) identifier;
return new CommunicationIdentifierModel()
.setRawId(communicationUserIdentifier.getRawId())
.setCommunicationUser(
new CommunicationUserIdentifierModel().setId(communicationUserIdentifier.getId()));
}
if (identifier instanceof PhoneNumberIdentifier) {
PhoneNumberIdentifier phoneNumberIdentifier = (PhoneNumberIdentifier) identifier;
return new CommunicationIdentifierModel()
.setRawId(phoneNumberIdentifier.getRawId())
.setPhoneNumber(new PhoneNumberIdentifierModel().setValue(phoneNumberIdentifier.getPhoneNumber()));
}
if (identifier instanceof MicrosoftTeamsUserIdentifier) {
MicrosoftTeamsUserIdentifier teamsUserIdentifier = (MicrosoftTeamsUserIdentifier) identifier;
return new CommunicationIdentifierModel()
.setRawId(teamsUserIdentifier.getRawId())
.setMicrosoftTeamsUser(new MicrosoftTeamsUserIdentifierModel()
.setIsAnonymous(teamsUserIdentifier.isAnonymous())
.setUserId(teamsUserIdentifier.getUserId())
.setCloud(CommunicationCloudEnvironmentModel.fromString(
teamsUserIdentifier.getCloudEnvironment().toString())));
}
if (identifier instanceof UnknownIdentifier) {
UnknownIdentifier unknownIdentifier = (UnknownIdentifier) identifier;
return new CommunicationIdentifierModel().setRawId(unknownIdentifier.getId());
}
throw new IllegalArgumentException(String.format("Unknown identifier class '%s'", identifier.getClass().getName()));
}
private static void assertSingleType(CommunicationIdentifierModel identifier) {
CommunicationUserIdentifierModel communicationUser = identifier.getCommunicationUser();
PhoneNumberIdentifierModel phoneNumber = identifier.getPhoneNumber();
MicrosoftTeamsUserIdentifierModel microsoftTeamsUser = identifier.getMicrosoftTeamsUser();
ArrayList<String> presentProperties = new ArrayList<>();
if (communicationUser != null) {
presentProperties.add(communicationUser.getClass().getName());
}
if (phoneNumber != null) {
presentProperties.add(phoneNumber.getClass().getName());
}
if (microsoftTeamsUser != null) {
presentProperties.add(microsoftTeamsUser.getClass().getName());
}
if (presentProperties.size() > 1) {
throw new IllegalArgumentException(
String.format(
"Only one of the identifier models in %s should be present.",
String.join(", ", presentProperties)));
}
}
} | class CommunicationIdentifierConverter {
/**
* Maps from {@link CommunicationIdentifierModel} to {@link CommunicationIdentifier}.
*/
/**
* Maps from {@link CommunicationIdentifier} to {@link CommunicationIdentifierModel}.
*/
public static CommunicationIdentifierModel convert(CommunicationIdentifier identifier)
throws IllegalArgumentException {
if (identifier == null) {
return null;
}
if (identifier instanceof CommunicationUserIdentifier) {
CommunicationUserIdentifier communicationUserIdentifier = (CommunicationUserIdentifier) identifier;
return new CommunicationIdentifierModel()
.setRawId(communicationUserIdentifier.getRawId())
.setCommunicationUser(
new CommunicationUserIdentifierModel().setId(communicationUserIdentifier.getId()));
}
if (identifier instanceof PhoneNumberIdentifier) {
PhoneNumberIdentifier phoneNumberIdentifier = (PhoneNumberIdentifier) identifier;
return new CommunicationIdentifierModel()
.setRawId(phoneNumberIdentifier.getRawId())
.setPhoneNumber(new PhoneNumberIdentifierModel().setValue(phoneNumberIdentifier.getPhoneNumber()));
}
if (identifier instanceof MicrosoftTeamsUserIdentifier) {
MicrosoftTeamsUserIdentifier teamsUserIdentifier = (MicrosoftTeamsUserIdentifier) identifier;
return new CommunicationIdentifierModel()
.setRawId(teamsUserIdentifier.getRawId())
.setMicrosoftTeamsUser(new MicrosoftTeamsUserIdentifierModel()
.setIsAnonymous(teamsUserIdentifier.isAnonymous())
.setUserId(teamsUserIdentifier.getUserId())
.setCloud(CommunicationCloudEnvironmentModel.fromString(
teamsUserIdentifier.getCloudEnvironment().toString())));
}
if (identifier instanceof UnknownIdentifier) {
UnknownIdentifier unknownIdentifier = (UnknownIdentifier) identifier;
return new CommunicationIdentifierModel().setRawId(unknownIdentifier.getId());
}
throw new IllegalArgumentException(String.format("Unknown identifier class '%s'", identifier.getClass().getName()));
}
private static void assertSingleType(CommunicationIdentifierModel identifier) {
CommunicationUserIdentifierModel communicationUser = identifier.getCommunicationUser();
PhoneNumberIdentifierModel phoneNumber = identifier.getPhoneNumber();
MicrosoftTeamsUserIdentifierModel microsoftTeamsUser = identifier.getMicrosoftTeamsUser();
ArrayList<String> presentProperties = new ArrayList<>();
if (communicationUser != null) {
presentProperties.add(communicationUser.getClass().getName());
}
if (phoneNumber != null) {
presentProperties.add(phoneNumber.getClass().getName());
}
if (microsoftTeamsUser != null) {
presentProperties.add(microsoftTeamsUser.getClass().getName());
}
if (presentProperties.size() > 1) {
throw new IllegalArgumentException(
String.format(
"Only one of the identifier models in %s should be present.",
String.join(", ", presentProperties)));
}
}
private static CommunicationIdentifierModelKind extractKind(CommunicationIdentifierModel identifier) {
Objects.requireNonNull(identifier, "CommunicationIdentifierModel cannot be null.");
if (identifier.getCommunicationUser() != null) {
return CommunicationIdentifierModelKind.COMMUNICATION_USER;
}
if (identifier.getPhoneNumber() != null) {
return CommunicationIdentifierModelKind.PHONE_NUMBER;
}
if (identifier.getMicrosoftTeamsUser() != null) {
return CommunicationIdentifierModelKind.MICROSOFT_TEAMS_USER;
}
return CommunicationIdentifierModelKind.UNKNOWN;
}
} |
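The review above notes that the `rawId` carried by an incoming `CommunicationIdentifierModel` may be a value (such as `123abc`) that cannot be re-derived from the user ID and cloud, which is why the converter keeps passing it to `setRawId`. A small illustrative snippet of that concern; the import path and the `"teams-user-id"` value are assumptions, and only the identifier API calls are taken from the code above:

```java
// Assumed import location for the identifier type used in the converter above.
import com.azure.communication.common.MicrosoftTeamsUserIdentifier;

public class RawIdPreservationExample {
    public static void main(String[] args) {
        // A deliberately arbitrary raw ID that cannot be derived from the user ID alone.
        String serviceProvidedRawId = "123abc";

        MicrosoftTeamsUserIdentifier withRawId =
                new MicrosoftTeamsUserIdentifier("teams-user-id", false)
                        .setRawId(serviceProvidedRawId);

        MicrosoftTeamsUserIdentifier withoutRawId =
                new MicrosoftTeamsUserIdentifier("teams-user-id", false);

        // Only the first identifier is guaranteed to echo the service-provided value back.
        System.out.println(withRawId.getRawId());    // 123abc
        System.out.println(withoutRawId.getRawId()); // null or an SDK-derived value, depending on the SDK version
    }
}
```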
I don't think this is quite right; except for the value of `resources`, these are regular query parameters, so the `&` and `=` separators should not be URL encoded. | String urlEncodedPriceInformation() {
var parameters = "supportLevel=standard&committedSpend=0&enclave=false" +
"&resources=nodes=1,vcpu=1,memoryGb=1,diskGb=10,gpuMemoryGb=0" +
"&resources=nodes=1,vcpu=1,memoryGb=1,diskGb=10,gpuMemoryGb=0";
return URLEncoder.encode(parameters, UTF_8);
} | return URLEncoder.encode(parameters, UTF_8); | String urlEncodedPriceInformation() {
String resources = URLEncoder.encode("nodes=1,vcpu=1,memoryGb=1,diskGb=10,gpuMemoryGb=0", UTF_8);
return "supportLevel=basic&committedSpend=0&enclave=false" +
"&resources=" + resources +
"&resources=" + resources;
} | class PricingApiHandlerTest extends ControllerContainerCloudTest {
private static final String responseFiles = "src/test/java/com/yahoo/vespa/hosted/controller/restapi/pricing/responses/";
@Test
void testPricingInfo() {
ContainerTester tester = new ContainerTester(container, responseFiles);
assertEquals(SystemName.Public, tester.controller().system());
var request = request("/pricing/v1/pricing?" + urlEncodedPriceInformation());
tester.assertResponse(request, """
{"listPrice":"2400.00","volumeDiscount":"0.00"}""",
200);
}
/**
* 2 clusters, with each having 1 node, with 1 vcpu, 1 Gb memory, 10 Gb disk and no GPU
* price will be 20000 + 2000 + 200
*/
} | class PricingApiHandlerTest extends ControllerContainerCloudTest {
private static final String responseFiles = "src/test/java/com/yahoo/vespa/hosted/controller/restapi/pricing/responses/";
@Test
void testPricingInfo() {
ContainerTester tester = new ContainerTester(container, responseFiles);
assertEquals(SystemName.Public, tester.controller().system());
var request = request("/pricing/v1/pricing?" + urlEncodedPriceInformation());
tester.assertJsonResponse(request, """
{
"priceInfo": [
{"description": "List price", "amount": "2400.00"},
{"description": "Volume discount", "amount": "-5.00"}
],
"totalAmount": "2395.00"
}
""",
200);
}
@Test
void testPricingInfoWithIncompleteParameter() {
ContainerTester tester = new ContainerTester(container, responseFiles);
assertEquals(SystemName.Public, tester.controller().system());
var request = request("/pricing/v1/pricing?" + urlEncodedPriceInformationWithMissingValueInResourcs());
tester.assertJsonResponse(request,
"{\"error-code\":\"BAD_REQUEST\",\"message\":\"Error in query parameter, expected '=' between key and value: resources\"}",
400);
}
/**
* 2 clusters, with each having 1 node, with 1 vcpu, 1 Gb memory, 10 Gb disk and no GPU
* price will be 20000 + 2000 + 200
*/
String urlEncodedPriceInformationWithMissingValueInResourcs() {
return URLEncoder.encode("supportLevel=basic&committedSpend=0&enclave=false&resources", UTF_8);
}
} |
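The review above argues that only the value of each `resources` parameter needs URL encoding, while the `&` and `=` between regular query parameters should stay literal, and the rewritten helper does exactly that. A self-contained sketch of the difference; the parameter names are copied from the test, the rest is illustrative:

```java
import static java.nio.charset.StandardCharsets.UTF_8;

import java.net.URLEncoder;

public class QueryEncodingExample {
    public static void main(String[] args) {
        // Encode only the value; '&' and '=' separating regular parameters stay literal.
        String resources = URLEncoder.encode("nodes=1,vcpu=1,memoryGb=1,diskGb=10,gpuMemoryGb=0", UTF_8);
        String query = "supportLevel=basic&committedSpend=0&enclave=false"
                + "&resources=" + resources
                + "&resources=" + resources;
        System.out.println(query);

        // Encoding the whole string would also escape the separators
        // (supportLevel%3Dbasic%26committedSpend%3D0...), so the server would
        // no longer see individual query parameters.
        System.out.println(URLEncoder.encode("supportLevel=basic&committedSpend=0", UTF_8));
    }
}
```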
Thread-safe modification of this flag should be ensured here. | public void setShouldFailRequest(boolean shouldFailRequest) {
this.shouldFailRequest = shouldFailRequest;
} | this.shouldFailRequest = shouldFailRequest; | public void setShouldFailRequest(boolean shouldFailRequest) {
this.shouldFailRequest = shouldFailRequest;
} | class TimeoutWriter extends AsyncSinkWriter<String, Long> {
private Exception fatalError;
private final CountDownLatch completionLatch;
private Thread submitThread;
private boolean shouldFailRequest = false;
public TimeoutWriter(
WriterInitContext writerInitContext,
int maxBatchSize,
long maximumTimeInBufferMs,
long requestTimeout,
boolean failOnTimeout) {
super(
(ElementConverter<String, Long>) (element, context) -> Long.parseLong(element),
writerInitContext,
AsyncSinkWriterConfiguration.builder()
.setMaxBatchSize(maxBatchSize)
.setMaxBatchSizeInBytes(Long.MAX_VALUE)
.setMaxInFlightRequests(Integer.MAX_VALUE)
.setMaxBufferedRequests(Integer.MAX_VALUE)
.setMaxTimeInBufferMS(maximumTimeInBufferMs)
.setMaxRecordSizeInBytes(Long.MAX_VALUE)
.setRequestTimeoutMS(requestTimeout)
.setFailOnTimeout(failOnTimeout)
.build(),
Collections.emptyList());
this.completionLatch = new CountDownLatch(1);
}
@Override
protected void submitRequestEntries(
List<Long> requestEntries, ResultHandler<Long> resultHandler) {
submitThread =
new Thread(
() -> {
while (completionLatch.getCount() > 0) {
try {
completionLatch.await();
} catch (InterruptedException e) {
fail("Submission thread must not be interrupted.");
}
}
submitRequestEntriesSync(requestEntries, resultHandler);
});
submitThread.start();
}
private void submitRequestEntriesSync(
List<Long> requestEntries, ResultHandler<Long> resultHandler) {
if (fatalError != null) {
resultHandler.completeExceptionally(fatalError);
} else if (shouldFailRequest) {
shouldFailRequest = false;
resultHandler.retryForEntries(requestEntries);
} else {
destination.addAll(requestEntries);
resultHandler.complete();
}
}
@Override
protected long getSizeInBytes(Long requestEntry) {
return 8;
}
public void setFatalError(Exception fatalError) {
this.fatalError = fatalError;
}
public void deliverMessage() throws InterruptedException {
completionLatch.countDown();
submitThread.join();
}
} | class TimeoutWriter extends AsyncSinkWriter<String, Long> {
private Exception fatalError;
private final CountDownLatch completionLatch;
private Future<?> submitFuture;
private boolean shouldFailRequest = false;
public TimeoutWriter(
WriterInitContext writerInitContext,
int maxBatchSize,
long maximumTimeInBufferMs,
long requestTimeout,
boolean failOnTimeout) {
super(
(ElementConverter<String, Long>) (element, context) -> Long.parseLong(element),
writerInitContext,
AsyncSinkWriterConfiguration.builder()
.setMaxBatchSize(maxBatchSize)
.setMaxBatchSizeInBytes(Long.MAX_VALUE)
.setMaxInFlightRequests(Integer.MAX_VALUE)
.setMaxBufferedRequests(Integer.MAX_VALUE)
.setMaxTimeInBufferMS(maximumTimeInBufferMs)
.setMaxRecordSizeInBytes(Long.MAX_VALUE)
.setRequestTimeoutMS(requestTimeout)
.setFailOnTimeout(failOnTimeout)
.build(),
Collections.emptyList());
this.completionLatch = new CountDownLatch(1);
}
@Override
protected void submitRequestEntries(
List<Long> requestEntries, ResultHandler<Long> resultHandler) {
submitFuture =
executorService.submit(
() -> {
while (completionLatch.getCount() > 0) {
try {
completionLatch.await();
} catch (InterruptedException e) {
fail("Submission thread must not be interrupted.");
}
}
submitRequestEntriesSync(requestEntries, resultHandler);
});
}
private void submitRequestEntriesSync(
List<Long> requestEntries, ResultHandler<Long> resultHandler) {
if (fatalError != null) {
resultHandler.completeExceptionally(fatalError);
} else if (shouldFailRequest) {
shouldFailRequest = false;
resultHandler.retryForEntries(requestEntries);
} else {
destination.addAll(requestEntries);
resultHandler.complete();
}
}
@Override
protected long getSizeInBytes(Long requestEntry) {
return 8;
}
public void setFatalError(Exception fatalError) {
this.fatalError = fatalError;
}
public void deliverMessage() throws InterruptedException, ExecutionException {
completionLatch.countDown();
submitFuture.get();
}
} |
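The review above asks for thread-safe modification of the `shouldFailRequest` flag, which is written from the test thread and read from the writer's submission path. A minimal sketch of two common options, `volatile` for visibility and `AtomicBoolean` for atomic read-modify-write; only the field and setter names follow the test, the rest is illustrative:

```java
import java.util.concurrent.atomic.AtomicBoolean;

class ThreadSafeFlagExample {
    // Option 1: volatile guarantees that a write from one thread is visible
    // to subsequent reads from other threads.
    private volatile boolean shouldFailRequest;

    void setShouldFailRequest(boolean shouldFailRequest) {
        this.shouldFailRequest = shouldFailRequest;
    }

    boolean shouldFailRequest() {
        return shouldFailRequest;
    }

    // Option 2: AtomicBoolean also allows atomic read-modify-write, e.g.
    // "fail exactly one request" without a race between check and reset.
    private final AtomicBoolean failNextRequest = new AtomicBoolean(false);

    void armFailNextRequest() {
        failNextRequest.set(true);
    }

    boolean consumeFailNextRequest() {
        return failNextRequest.compareAndSet(true, false);
    }
}
```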
```suggestion return new LogicalSubQueryAlias<>(ctx.identifier().getText(), columnNames, queryPlan); ``` | public LogicalSubQueryAlias<Plan> visitAliasQuery(AliasQueryContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
LogicalPlan queryPlan = plan(ctx.query());
Optional<List<String>> columnNames = optionalVisit(ctx.columnAliases(), () ->
ctx.columnAliases().identifier().stream()
.map(RuleContext::getText)
.collect(ImmutableList.toImmutableList())
);
return new LogicalSubQueryAlias(ctx.identifier().getText(), columnNames, queryPlan);
});
} | return new LogicalSubQueryAlias(ctx.identifier().getText(), columnNames, queryPlan); | public LogicalSubQueryAlias<Plan> visitAliasQuery(AliasQueryContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
LogicalPlan queryPlan = plan(ctx.query());
Optional<List<String>> columnNames = optionalVisit(ctx.columnAliases(), () ->
ctx.columnAliases().identifier().stream()
.map(RuleContext::getText)
.collect(ImmutableList.toImmutableList())
);
return new LogicalSubQueryAlias<>(ctx.identifier().getText(), columnNames, queryPlan);
});
} | class LogicalPlanBuilder extends DorisParserBaseVisitor<Object> {
@SuppressWarnings("unchecked")
protected <T> T typedVisit(ParseTree ctx) {
return (T) ctx.accept(this);
}
/**
* Override the default behavior for all visit methods. This will only return a non-null result
* when the context has only one child. This is done because there is no generic method to
* combine the results of the context children. In all other cases null is returned.
*/
@Override
public Object visitChildren(RuleNode node) {
if (node.getChildCount() == 1) {
return node.getChild(0).accept(this);
} else {
return null;
}
}
@Override
public LogicalPlan visitSingleStatement(SingleStatementContext ctx) {
return ParserUtils.withOrigin(ctx, () -> (LogicalPlan) visit(ctx.statement()));
}
@Override
public LogicalPlan visitStatementDefault(StatementDefaultContext ctx) {
LogicalPlan plan = plan(ctx.query());
return withExplain(plan, ctx.explain());
}
/**
* Visit multi-statements.
*/
@Override
public List<Pair<LogicalPlan, StatementContext>> visitMultiStatements(MultiStatementsContext ctx) {
List<Pair<LogicalPlan, StatementContext>> logicalPlans = Lists.newArrayList();
for (org.apache.doris.nereids.DorisParser.StatementContext statement : ctx.statement()) {
StatementContext statementContext = new StatementContext();
ConnectContext connectContext = ConnectContext.get();
if (connectContext != null) {
connectContext.setStatementContext(statementContext);
statementContext.setConnectContext(connectContext);
}
logicalPlans.add(Pair.of(
ParserUtils.withOrigin(ctx, () -> (LogicalPlan) visit(statement)), statementContext));
}
return logicalPlans;
}
/* ********************************************************************************************
* Plan parsing
* ******************************************************************************************** */
/**
* process lateral view, add a {@link org.apache.doris.nereids.trees.plans.logical.LogicalGenerate} on plan.
*/
private LogicalPlan withGenerate(LogicalPlan plan, LateralViewContext ctx) {
if (ctx.LATERAL() == null) {
return plan;
}
String generateName = ctx.tableName.getText();
String columnName = ctx.columnName.getText();
String functionName = ctx.functionName.getText();
List<Expression> arguments = ctx.expression().stream()
.<Expression>map(this::typedVisit)
.collect(ImmutableList.toImmutableList());
Function unboundFunction = new UnboundFunction(functionName, arguments);
return new LogicalGenerate<>(ImmutableList.of(unboundFunction),
ImmutableList.of(new UnboundSlot(generateName, columnName)), plan);
}
/**
* process CTE and store the results in a logical plan node LogicalCTE
*/
private LogicalPlan withCte(LogicalPlan plan, CteContext ctx) {
if (ctx == null) {
return plan;
}
return new LogicalCTE<>((List) visit(ctx.aliasQuery(), LogicalSubQueryAlias.class), plan);
}
/**
* process CTE's alias queries and column aliases
*/
@Override
@Override
public Command visitCreateRowPolicy(CreateRowPolicyContext ctx) {
return new CreatePolicyCommand(PolicyTypeEnum.ROW, getExpression(ctx.booleanExpression()));
}
@Override
public LogicalPlan visitQuery(QueryContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
LogicalPlan query = plan(ctx.queryTerm());
query = withCte(query, ctx.cte());
return withQueryOrganization(query, ctx.queryOrganization());
});
}
@Override
public LogicalPlan visitSetOperation(SetOperationContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
LogicalPlan leftQuery = plan(ctx.left);
LogicalPlan rightQuery = plan(ctx.right);
Qualifier qualifier;
if (ctx.setQuantifier() == null || ctx.setQuantifier().DISTINCT() != null) {
qualifier = Qualifier.DISTINCT;
} else {
qualifier = Qualifier.ALL;
}
List<Plan> newChildren = new ImmutableList.Builder<Plan>()
.add(leftQuery)
.add(rightQuery)
.build();
if (ctx.UNION() != null) {
return new LogicalUnion(qualifier, newChildren);
} else if (ctx.EXCEPT() != null) {
return new LogicalExcept(qualifier, newChildren);
} else if (ctx.INTERSECT() != null) {
return new LogicalIntersect(qualifier, newChildren);
}
throw new ParseException("not support", ctx);
});
}
@Override
public LogicalPlan visitSubquery(SubqueryContext ctx) {
return ParserUtils.withOrigin(ctx, () -> plan(ctx.query()));
}
@Override
public LogicalPlan visitRegularQuerySpecification(RegularQuerySpecificationContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
SelectClauseContext selectCtx = ctx.selectClause();
LogicalPlan selectPlan;
if (ctx.fromClause() == null) {
SelectColumnClauseContext columnCtx = selectCtx.selectColumnClause();
if (columnCtx.EXCEPT() != null) {
throw new ParseException("select-except cannot be used in one row relation", selectCtx);
}
selectPlan = withOneRowRelation(columnCtx);
} else {
LogicalPlan relation = visitFromClause(ctx.fromClause());
selectPlan = withSelectQuerySpecification(
ctx, relation,
selectCtx,
Optional.ofNullable(ctx.whereClause()),
Optional.ofNullable(ctx.aggClause()),
Optional.ofNullable(ctx.havingClause())
);
}
selectPlan = withCte(selectPlan, ctx.cte());
selectPlan = withQueryOrganization(selectPlan, ctx.queryOrganization());
return withSelectHint(selectPlan, selectCtx.selectHint());
});
}
/**
* Create an aliased table reference. This is typically used in FROM clauses.
*/
private LogicalPlan withTableAlias(LogicalPlan plan, TableAliasContext ctx) {
if (ctx.strictIdentifier() == null) {
return plan;
}
return ParserUtils.withOrigin(ctx.strictIdentifier(), () -> {
String alias = ctx.strictIdentifier().getText();
if (null != ctx.identifierList()) {
throw new ParseException("Do not implemented", ctx);
}
return new LogicalSubQueryAlias<>(alias, plan);
});
}
private LogicalPlan withCheckPolicy(LogicalPlan plan) {
return new LogicalCheckPolicy<>(plan);
}
@Override
public LogicalPlan visitTableName(TableNameContext ctx) {
List<String> tableId = visitMultipartIdentifier(ctx.multipartIdentifier());
List<String> partitionNames = new ArrayList<>();
boolean isTempPart = false;
if (ctx.specifiedPartition() != null) {
isTempPart = ctx.specifiedPartition().TEMPORARY() != null;
if (ctx.specifiedPartition().identifier() != null) {
partitionNames.add(ctx.specifiedPartition().identifier().getText());
} else {
partitionNames.addAll(visitIdentifierList(ctx.specifiedPartition().identifierList()));
}
}
LogicalPlan checkedRelation = withCheckPolicy(
new UnboundRelation(RelationUtil.newRelationId(), tableId, partitionNames, isTempPart));
LogicalPlan plan = withTableAlias(checkedRelation, ctx.tableAlias());
for (LateralViewContext lateralViewContext : ctx.lateralView()) {
plan = withGenerate(plan, lateralViewContext);
}
return plan;
}
@Override
public LogicalPlan visitAliasedQuery(AliasedQueryContext ctx) {
if (ctx.tableAlias().getText().equals("")) {
throw new ParseException("Every derived table must have its own alias", ctx);
}
LogicalPlan plan = withTableAlias(visitQuery(ctx.query()), ctx.tableAlias());
for (LateralViewContext lateralViewContext : ctx.lateralView()) {
plan = withGenerate(plan, lateralViewContext);
}
return plan;
}
@Override
public LogicalPlan visitTableValuedFunction(TableValuedFunctionContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
String functionName = ctx.tvfName.getText();
Builder<String, String> map = ImmutableMap.builder();
for (TvfPropertyContext argument : ctx.properties) {
String key = parseTVFPropertyItem(argument.key);
String value = parseTVFPropertyItem(argument.value);
map.put(key, value);
}
LogicalPlan relation = new UnboundTVFRelation(RelationUtil.newRelationId(),
functionName, new TVFProperties(map.build()));
return withTableAlias(relation, ctx.tableAlias());
});
}
/**
* Create a star (i.e. all) expression; this selects all elements (in the specified object).
* Both un-targeted (global) and targeted aliases are supported.
*/
@Override
public Expression visitStar(StarContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
final QualifiedNameContext qualifiedNameContext = ctx.qualifiedName();
List<String> target;
if (qualifiedNameContext != null) {
target = qualifiedNameContext.identifier()
.stream()
.map(RuleContext::getText)
.collect(ImmutableList.toImmutableList());
} else {
target = ImmutableList.of();
}
return new UnboundStar(target);
});
}
/**
* Create an aliased expression if an alias is specified. Both single and multi-aliases are
* supported.
*/
@Override
public Expression visitNamedExpression(NamedExpressionContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression expression = getExpression(ctx.expression());
if (ctx.errorCapturingIdentifier() != null) {
return new UnboundAlias(expression, ctx.errorCapturingIdentifier().getText());
} else if (ctx.STRING() != null) {
return new UnboundAlias(expression, ctx.STRING().getText()
.substring(1, ctx.STRING().getText().length() - 1));
} else {
return expression;
}
});
}
@Override
public Expression visitSystemVariable(SystemVariableContext ctx) {
String name = ctx.identifier().getText();
SessionVariable sessionVariable = ConnectContext.get().getSessionVariable();
Literal literal = null;
if (ctx.kind == null) {
literal = VariableMgr.getLiteral(sessionVariable, name, SetType.DEFAULT);
} else if (ctx.kind.getType() == DorisParser.SESSION) {
literal = VariableMgr.getLiteral(sessionVariable, name, SetType.SESSION);
} else if (ctx.kind.getType() == DorisParser.GLOBAL) {
literal = VariableMgr.getLiteral(sessionVariable, name, SetType.GLOBAL);
}
if (literal == null) {
throw new ParseException("Unsupported system variable: " + ctx.getText(), ctx);
}
if (!Strings.isNullOrEmpty(name) && VariableVarConverters.hasConverter(name)) {
try {
Preconditions.checkArgument(literal instanceof IntegerLikeLiteral);
IntegerLikeLiteral integerLikeLiteral = (IntegerLikeLiteral) literal;
literal = new StringLiteral(VariableVarConverters.decode(name, integerLikeLiteral.getLongValue()));
} catch (DdlException e) {
throw new ParseException(e.getMessage(), ctx);
}
}
return literal;
}
@Override
public Expression visitUserVariable(UserVariableContext ctx) {
throw new ParseException("Unsupported user variable :" + ctx.getText(), ctx);
}
/**
* Create a comparison expression. This compares two expressions. The following comparison
* operators are supported:
* - Equal: '=' or '=='
* - Null-safe Equal: '<=>'
* - Not Equal: '<>' or '!='
* - Less than: '<'
* - Less then or Equal: '<='
* - Greater than: '>'
* - Greater then or Equal: '>='
*/
@Override
public Expression visitComparison(ComparisonContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression left = getExpression(ctx.left);
Expression right = getExpression(ctx.right);
TerminalNode operator = (TerminalNode) ctx.comparisonOperator().getChild(0);
switch (operator.getSymbol().getType()) {
case DorisParser.EQ:
return new EqualTo(left, right);
case DorisParser.NEQ:
return new Not(new EqualTo(left, right));
case DorisParser.LT:
return new LessThan(left, right);
case DorisParser.GT:
return new GreaterThan(left, right);
case DorisParser.LTE:
return new LessThanEqual(left, right);
case DorisParser.GTE:
return new GreaterThanEqual(left, right);
case DorisParser.NSEQ:
return new NullSafeEqual(left, right);
default:
throw new ParseException("Unsupported comparison expression: "
+ operator.getSymbol().getText(), ctx);
}
});
}
/**
* Create a not expression.
* format: NOT Expression
* for example:
* not 1
* not 1=1
*/
@Override
public Expression visitLogicalNot(LogicalNotContext ctx) {
return ParserUtils.withOrigin(ctx, () -> new Not(getExpression(ctx.booleanExpression())));
}
@Override
public Expression visitLogicalBinary(LogicalBinaryContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression left = getExpression(ctx.left);
Expression right = getExpression(ctx.right);
switch (ctx.operator.getType()) {
case DorisParser.LOGICALAND:
case DorisParser.AND:
return new And(left, right);
case DorisParser.OR:
return new Or(left, right);
default:
throw new ParseException("Unsupported logical binary type: " + ctx.operator.getText(), ctx);
}
});
}
/**
* Create a predicated expression. A predicated expression is a normal expression with a
* predicate attached to it, for example:
* {{{
* a + 1 IS NULL
* }}}
*/
@Override
public Expression visitPredicated(PredicatedContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression e = getExpression(ctx.valueExpression());
return ctx.predicate() == null ? e : withPredicate(e, ctx.predicate());
});
}
@Override
public Expression visitArithmeticUnary(ArithmeticUnaryContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression e = typedVisit(ctx.valueExpression());
switch (ctx.operator.getType()) {
case DorisParser.PLUS:
return e;
case DorisParser.MINUS:
IntegerLiteral zero = new IntegerLiteral(0);
return new Subtract(zero, e);
case DorisParser.TILDE:
return new BitNot(e);
default:
throw new ParseException("Unsupported arithmetic unary type: " + ctx.operator.getText(), ctx);
}
});
}
@Override
public Expression visitBitOperation(BitOperationContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression left = getExpression(ctx.left);
Expression right = getExpression(ctx.right);
if (ctx.operator.getType() == DorisParser.BITAND) {
return new BitAnd(left, right);
} else if (ctx.operator.getType() == DorisParser.BITOR) {
return new BitOr(left, right);
} else if (ctx.operator.getType() == DorisParser.BITXOR) {
return new BitXor(left, right);
}
throw new ParseException(" not supported", ctx);
});
}
@Override
public Expression visitArithmeticBinary(ArithmeticBinaryContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression left = getExpression(ctx.left);
Expression right = getExpression(ctx.right);
int type = ctx.operator.getType();
if (left instanceof Interval) {
if (type != DorisParser.PLUS) {
throw new ParseException("Only supported: " + Operator.ADD, ctx);
}
Interval interval = (Interval) left;
return new TimestampArithmetic(Operator.ADD, right, interval.value(), interval.timeUnit(), true);
}
if (right instanceof Interval) {
Operator op;
if (type == DorisParser.PLUS) {
op = Operator.ADD;
} else if (type == DorisParser.MINUS) {
op = Operator.SUBTRACT;
} else {
throw new ParseException("Only supported: " + Operator.ADD + " and " + Operator.SUBTRACT, ctx);
}
Interval interval = (Interval) right;
return new TimestampArithmetic(op, left, interval.value(), interval.timeUnit(), false);
}
return ParserUtils.withOrigin(ctx, () -> {
switch (type) {
case DorisParser.ASTERISK:
return new Multiply(left, right);
case DorisParser.SLASH:
return new Divide(left, right);
case DorisParser.PERCENT:
return new Mod(left, right);
case DorisParser.PLUS:
return new Add(left, right);
case DorisParser.MINUS:
return new Subtract(left, right);
case DorisParser.DIV:
return new IntegralDivide(left, right);
case DorisParser.HAT:
return new BitXor(left, right);
case DorisParser.PIPE:
return new BitOr(left, right);
case DorisParser.AMPERSAND:
return new BitAnd(left, right);
default:
throw new ParseException(
"Unsupported arithmetic binary type: " + ctx.operator.getText(), ctx);
}
});
});
}
@Override
public Expression visitTimestampdiff(TimestampdiffContext ctx) {
Expression start = (Expression) visit(ctx.startTimestamp);
Expression end = (Expression) visit(ctx.endTimestamp);
String unit = ctx.unit.getText();
if ("YEAR".equalsIgnoreCase(unit)) {
return new YearsDiff(end, start);
} else if ("MONTH".equalsIgnoreCase(unit)) {
return new MonthsDiff(end, start);
} else if ("WEEK".equalsIgnoreCase(unit)) {
return new WeeksDiff(end, start);
} else if ("DAY".equalsIgnoreCase(unit)) {
return new DaysDiff(end, start);
} else if ("HOUR".equalsIgnoreCase(unit)) {
return new HoursDiff(end, start);
} else if ("MINUTE".equalsIgnoreCase(unit)) {
return new MinutesDiff(end, start);
} else if ("SECOND".equalsIgnoreCase(unit)) {
return new SecondsDiff(end, start);
}
throw new ParseException("Unsupported time stamp diff time unit: " + unit
+ ", supported time unit: YEAR/MONTH/WEEK/DAY/HOUR/MINUTE/SECOND", ctx);
}
@Override
public Expression visitTimestampadd(TimestampaddContext ctx) {
Expression start = (Expression) visit(ctx.startTimestamp);
Expression end = (Expression) visit(ctx.endTimestamp);
String unit = ctx.unit.getText();
if ("YEAR".equalsIgnoreCase(unit)) {
return new YearsAdd(end, start);
} else if ("MONTH".equalsIgnoreCase(unit)) {
return new MonthsAdd(end, start);
} else if ("WEEK".equalsIgnoreCase(unit)) {
return new WeeksAdd(end, start);
} else if ("DAY".equalsIgnoreCase(unit)) {
return new DaysAdd(end, start);
} else if ("HOUR".equalsIgnoreCase(unit)) {
return new HoursAdd(end, start);
} else if ("MINUTE".equalsIgnoreCase(unit)) {
return new MinutesAdd(end, start);
} else if ("SECOND".equalsIgnoreCase(unit)) {
return new SecondsAdd(end, start);
}
throw new ParseException("Unsupported time stamp add time unit: " + unit
+ ", supported time unit: YEAR/MONTH/WEEK/DAY/HOUR/MINUTE/SECOND", ctx);
}
@Override
public Expression visitDate_add(Date_addContext ctx) {
Expression timeStamp = (Expression) visit(ctx.timestamp);
Expression amount = (Expression) visit(ctx.unitsAmount);
if (ctx.unit == null) {
return new DaysAdd(timeStamp, amount);
}
if ("Year".equalsIgnoreCase(ctx.unit.getText())) {
return new YearsAdd(timeStamp, amount);
} else if ("MONTH".equalsIgnoreCase(ctx.unit.getText())) {
return new MonthsAdd(timeStamp, amount);
} else if ("WEEK".equalsIgnoreCase(ctx.unit.getText())) {
return new WeeksAdd(timeStamp, amount);
} else if ("DAY".equalsIgnoreCase(ctx.unit.getText())) {
return new DaysAdd(timeStamp, amount);
} else if ("Hour".equalsIgnoreCase(ctx.unit.getText())) {
return new HoursAdd(timeStamp, amount);
} else if ("Minute".equalsIgnoreCase(ctx.unit.getText())) {
return new MinutesAdd(timeStamp, amount);
} else if ("Second".equalsIgnoreCase(ctx.unit.getText())) {
return new SecondsAdd(timeStamp, amount);
}
throw new ParseException("Unsupported time unit: " + ctx.unit
+ ", supported time unit: YEAR/MONTH/DAY/HOUR/MINUTE/SECOND", ctx);
}
@Override
public Expression visitDate_sub(Date_subContext ctx) {
Expression timeStamp = (Expression) visit(ctx.timestamp);
Expression amount = (Expression) visit(ctx.unitsAmount);
if (ctx.unit == null) {
return new DaysSub(timeStamp, amount);
}
if ("Year".equalsIgnoreCase(ctx.unit.getText())) {
return new YearsSub(timeStamp, amount);
} else if ("MONTH".equalsIgnoreCase(ctx.unit.getText())) {
return new MonthsSub(timeStamp, amount);
} else if ("WEEK".equalsIgnoreCase(ctx.unit.getText())) {
return new WeeksSub(timeStamp, amount);
} else if ("DAY".equalsIgnoreCase(ctx.unit.getText())) {
return new DaysSub(timeStamp, amount);
} else if ("Hour".equalsIgnoreCase(ctx.unit.getText())) {
return new HoursSub(timeStamp, amount);
} else if ("Minute".equalsIgnoreCase(ctx.unit.getText())) {
return new MinutesSub(timeStamp, amount);
} else if ("Second".equalsIgnoreCase(ctx.unit.getText())) {
return new SecondsSub(timeStamp, amount);
}
throw new ParseException("Unsupported time unit: " + ctx.unit
+ ", supported time unit: YEAR/MONTH/DAY/HOUR/MINUTE/SECOND", ctx);
}
@Override
public Expression visitDoublePipes(DorisParser.DoublePipesContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression left = getExpression(ctx.left);
Expression right = getExpression(ctx.right);
if (ConnectContext.get().getSessionVariable().getSqlMode() == SqlModeHelper.MODE_PIPES_AS_CONCAT) {
return new UnboundFunction("concat", Lists.newArrayList(left, right));
} else {
return new Or(left, right);
}
});
}
/**
* Create a value based [[CaseWhen]] expression. This has the following SQL form:
* {{{
* CASE [expression]
* WHEN [value] THEN [expression]
* ...
* ELSE [expression]
* END
* }}}
*/
@Override
public Expression visitSimpleCase(DorisParser.SimpleCaseContext context) {
Expression e = getExpression(context.value);
List<WhenClause> whenClauses = context.whenClause().stream()
.map(w -> new WhenClause(new EqualTo(e, getExpression(w.condition)), getExpression(w.result)))
.collect(ImmutableList.toImmutableList());
if (context.elseExpression == null) {
return new CaseWhen(whenClauses);
}
return new CaseWhen(whenClauses, getExpression(context.elseExpression));
}
/**
* Create a condition based [[CaseWhen]] expression. This has the following SQL syntax:
* {{{
* CASE
* WHEN [predicate] THEN [expression]
* ...
* ELSE [expression]
* END
* }}}
*
* @param context the parse tree
*/
@Override
public Expression visitSearchedCase(DorisParser.SearchedCaseContext context) {
List<WhenClause> whenClauses = context.whenClause().stream()
.map(w -> new WhenClause(getExpression(w.condition), getExpression(w.result)))
.collect(ImmutableList.toImmutableList());
if (context.elseExpression == null) {
return new CaseWhen(whenClauses);
}
return new CaseWhen(whenClauses, getExpression(context.elseExpression));
}
@Override
public Expression visitCast(DorisParser.CastContext ctx) {
return ParserUtils.withOrigin(ctx, () ->
new Cast(getExpression(ctx.expression()), typedVisit(ctx.dataType())));
}
@Override
public UnboundFunction visitExtract(DorisParser.ExtractContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
String functionName = ctx.field.getText();
return new UnboundFunction(functionName, false,
Collections.singletonList(getExpression(ctx.source)));
});
}
@Override
public Expression visitFunctionCall(DorisParser.FunctionCallContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
String functionName = ctx.functionIdentifier().getText();
boolean isDistinct = ctx.DISTINCT() != null;
List<Expression> params = visit(ctx.expression(), Expression.class);
List<OrderKey> orderKeys = visit(ctx.sortItem(), OrderKey.class);
if (!orderKeys.isEmpty()) {
return parseFunctionWithOrderKeys(functionName, isDistinct, params, orderKeys, ctx);
}
List<UnboundStar> unboundStars = ExpressionUtils.collectAll(params, UnboundStar.class::isInstance);
if (unboundStars.size() > 0) {
if (functionName.equalsIgnoreCase("count")) {
if (unboundStars.size() > 1) {
throw new ParseException(
"'*' can only be used once in conjunction with COUNT: " + functionName, ctx);
}
if (!unboundStars.get(0).getQualifier().isEmpty()) {
throw new ParseException("'*' can not has qualifier: " + unboundStars.size(), ctx);
}
if (ctx.windowSpec() != null) {
throw new ParseException(
"COUNT(*) isn't supported as window function; can use COUNT(col)", ctx);
}
return new Count();
}
throw new ParseException("'*' can only be used in conjunction with COUNT: " + functionName, ctx);
} else {
UnboundFunction function = new UnboundFunction(functionName, isDistinct, params);
if (ctx.windowSpec() != null) {
if (isDistinct) {
throw new ParseException("DISTINCT not allowed in window function: " + functionName, ctx);
}
return withWindowSpec(ctx.windowSpec(), function);
}
return function;
}
});
}
/**
* deal with window function definition
*/
private Window withWindowSpec(WindowSpecContext ctx, Expression function) {
Optional<List<Expression>> partitionKeys = optionalVisit(ctx.partitionClause(),
() -> visit(ctx.partitionClause().expression(), Expression.class));
Optional<List<OrderKey>> orderKeys = optionalVisit(ctx.sortClause(),
() -> visit(ctx.sortClause().sortItem(), OrderKey.class));
Optional<WindowFrame> windowFrame = optionalVisit(ctx.windowFrame(),
() -> withWindowFrame(ctx.windowFrame()));
return new Window(function, partitionKeys, orderKeys, windowFrame);
}
/**
* deal with optional expressions
*/
private <T, C> Optional optionalVisit(T ctx, Supplier<C> func) {
return Optional.ofNullable(ctx).map(a -> Optional.of(func.get()))
.orElse(Optional.empty());
}
/**
* deal with window frame
*/
private WindowFrame withWindowFrame(WindowFrameContext ctx) {
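// For example, "ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW" yields a ROWS frame whose
// left boundary is UNBOUNDED_PRECEDING and whose right boundary is CURRENT_ROW.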
FrameUnitsType frameUnitsType = FrameUnitsType.valueOf(ctx.frameUnits().getText().toUpperCase());
FrameBoundary leftBoundary = withFrameBound(ctx.start);
if (ctx.end != null) {
FrameBoundary rightBoundary = withFrameBound(ctx.end);
return new WindowFrame(frameUnitsType, leftBoundary, rightBoundary);
}
return new WindowFrame(frameUnitsType, leftBoundary);
}
private FrameBoundary withFrameBound(FrameBoundContext ctx) {
Optional<Expression> expression = Optional.empty();
if (ctx.expression() != null) {
expression = Optional.of(getExpression(ctx.expression()));
if (!expression.get().isLiteral()) {
throw new ParseException("Unsupported expression in WindowFrame : " + expression, ctx);
}
}
FrameBoundType frameBoundType = null;
switch (ctx.boundType.getType()) {
case DorisParser.PRECEDING:
if (ctx.UNBOUNDED() != null) {
frameBoundType = FrameBoundType.UNBOUNDED_PRECEDING;
} else {
frameBoundType = FrameBoundType.PRECEDING;
}
break;
case DorisParser.CURRENT:
frameBoundType = FrameBoundType.CURRENT_ROW;
break;
case DorisParser.FOLLOWING:
if (ctx.UNBOUNDED() != null) {
frameBoundType = FrameBoundType.UNBOUNDED_FOLLOWING;
} else {
frameBoundType = FrameBoundType.FOLLOWING;
}
break;
default:
}
return new FrameBoundary(expression, frameBoundType);
}
@Override
public Expression visitInterval(IntervalContext ctx) {
return new Interval(getExpression(ctx.value), visitUnitIdentifier(ctx.unit));
}
@Override
public String visitUnitIdentifier(UnitIdentifierContext ctx) {
return ctx.getText();
}
@Override
public Expression visitTypeConstructor(TypeConstructorContext ctx) {
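// e.g. DATE '2022-01-01' becomes a DateLiteral, TIMESTAMP '2022-01-01 00:00:00' a DateTimeLiteral
// and DATEV2 '2022-01-01' a DateV2Literal.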
String value = ctx.STRING().getText();
value = value.substring(1, value.length() - 1);
String type = ctx.type.getText().toUpperCase();
switch (type) {
case "DATE":
return new DateLiteral(value);
case "TIMESTAMP":
return new DateTimeLiteral(value);
case "DATEV2":
return new DateV2Literal(value);
default:
throw new ParseException("Unsupported data type : " + type, ctx);
}
}
@Override
public Expression visitDereference(DereferenceContext ctx) {
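// e.g. for "t1.col" the base slot "t1" gains the field name "col", producing UnboundSlot([t1, col]).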
return ParserUtils.withOrigin(ctx, () -> {
Expression e = getExpression(ctx.base);
if (e instanceof UnboundSlot) {
UnboundSlot unboundAttribute = (UnboundSlot) e;
List<String> nameParts = Lists.newArrayList(unboundAttribute.getNameParts());
nameParts.add(ctx.fieldName.getText());
return new UnboundSlot(nameParts);
} else {
throw new ParseException("Unsupported dereference expression: " + ctx.getText(), ctx);
}
});
}
@Override
public UnboundSlot visitColumnReference(ColumnReferenceContext ctx) {
return UnboundSlot.quoted(ctx.getText());
}
/**
* Create a NULL literal expression.
*/
@Override
public Expression visitNullLiteral(NullLiteralContext ctx) {
return new NullLiteral();
}
@Override
public Literal visitBooleanLiteral(BooleanLiteralContext ctx) {
Boolean b = Boolean.valueOf(ctx.getText());
return BooleanLiteral.of(b);
}
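/**
* Parse an integer literal into the narrowest integer literal type that can hold its value:
* TINYINT, SMALLINT, INT, BIGINT, and finally LARGEINT for anything wider than 64 bits.
*/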
@Override
public Literal visitIntegerLiteral(IntegerLiteralContext ctx) {
BigInteger bigInt = new BigInteger(ctx.getText());
if (BigInteger.valueOf(bigInt.byteValue()).equals(bigInt)) {
return new TinyIntLiteral(bigInt.byteValue());
} else if (BigInteger.valueOf(bigInt.shortValue()).equals(bigInt)) {
return new SmallIntLiteral(bigInt.shortValue());
} else if (BigInteger.valueOf(bigInt.intValue()).equals(bigInt)) {
return new IntegerLiteral(bigInt.intValue());
} else if (BigInteger.valueOf(bigInt.longValue()).equals(bigInt)) {
return new BigIntLiteral(bigInt.longValueExact());
} else {
return new LargeIntLiteral(bigInt);
}
}
@Override
public Literal visitStringLiteral(StringLiteralContext ctx) {
String txt = ctx.STRING().getText();
String s = escapeBackSlash(txt.substring(1, txt.length() - 1));
return new VarcharLiteral(s);
}
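/**
* Resolve MySQL-style backslash escapes in a string literal body: \n, \t, \r, \b, \0 and \Z map to
* their control characters, \_ and \% keep the backslash (so LIKE patterns still match literally),
* and any other escaped character is kept without the backslash.
*/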
private String escapeBackSlash(String str) {
StringBuilder sb = new StringBuilder();
int strLen = str.length();
for (int i = 0; i < strLen; ++i) {
char c = str.charAt(i);
if (c == '\\' && (i + 1) < strLen) {
switch (str.charAt(i + 1)) {
case 'n':
sb.append('\n');
break;
case 't':
sb.append('\t');
break;
case 'r':
sb.append('\r');
break;
case 'b':
sb.append('\b');
break;
case '0':
sb.append('\0');
break;
case 'Z':
sb.append('\032');
break;
case '_':
case '%':
sb.append('\\');
sb.append(str.charAt(i + 1));
break;
default:
sb.append(str.charAt(i + 1));
break;
}
i++;
} else {
sb.append(c);
}
}
return sb.toString();
}
@Override
public Expression visitParenthesizedExpression(ParenthesizedExpressionContext ctx) {
return getExpression(ctx.expression());
}
@Override
public List<Expression> visitNamedExpressionSeq(NamedExpressionSeqContext namedCtx) {
return visit(namedCtx.namedExpression(), Expression.class);
}
@Override
public LogicalPlan visitRelation(RelationContext ctx) {
return plan(ctx.relationPrimary());
}
@Override
public LogicalPlan visitFromClause(FromClauseContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
LogicalPlan left = null;
for (RelationContext relation : ctx.relation()) {
LogicalPlan right = visitRelation(relation);
left = (left == null) ? right :
new LogicalJoin<>(
JoinType.CROSS_JOIN,
ExpressionUtils.EMPTY_CONDITION,
ExpressionUtils.EMPTY_CONDITION,
JoinHint.NONE,
left,
right);
left = withJoinRelations(left, relation);
}
return left;
});
}
/* ********************************************************************************************
* Table Identifier parsing
* ******************************************************************************************** */
@Override
public List<String> visitMultipartIdentifier(MultipartIdentifierContext ctx) {
return ctx.parts.stream()
.map(RuleContext::getText)
.collect(ImmutableList.toImmutableList());
}
/**
* Create a Sequence of Strings for a parenthesis enclosed alias list.
*/
@Override
public List<String> visitIdentifierList(IdentifierListContext ctx) {
return visitIdentifierSeq(ctx.identifierSeq());
}
/**
* Create a Sequence of Strings for an identifier list.
*/
@Override
public List<String> visitIdentifierSeq(IdentifierSeqContext ctx) {
return ctx.ident.stream()
.map(RuleContext::getText)
.collect(ImmutableList.toImmutableList());
}
/**
* get OrderKey.
*
* @param ctx SortItemContext
* @return SortItems
*/
@Override
public OrderKey visitSortItem(SortItemContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
boolean isAsc = ctx.DESC() == null;
boolean isNullFirst = ctx.FIRST() != null || (ctx.LAST() == null && isAsc);
Expression expression = typedVisit(ctx.expression());
return new OrderKey(expression, isAsc, isNullFirst);
});
}
private <T> List<T> visit(List<? extends ParserRuleContext> contexts, Class<T> clazz) {
return contexts.stream()
.map(this::visit)
.map(clazz::cast)
.collect(ImmutableList.toImmutableList());
}
private LogicalPlan plan(ParserRuleContext tree) {
return (LogicalPlan) tree.accept(this);
}
/* ********************************************************************************************
* Expression parsing
* ******************************************************************************************** */
/**
* Create an expression from the given context. This method just passes the context on to the
* visitor and only takes care of typing (We assume that the visitor returns an Expression here).
*/
private Expression getExpression(ParserRuleContext ctx) {
return typedVisit(ctx);
}
private LogicalPlan withExplain(LogicalPlan inputPlan, ExplainContext ctx) {
if (ctx == null) {
return inputPlan;
}
return ParserUtils.withOrigin(ctx, () -> {
ExplainLevel explainLevel = ExplainLevel.NORMAL;
if (ctx.planType() != null) {
if (ctx.level == null || !ctx.level.getText().equalsIgnoreCase("plan")) {
throw new ParseException("Only explain plan can use plan type: " + ctx.planType().getText(), ctx);
}
}
if (ctx.level != null) {
if (!ctx.level.getText().equalsIgnoreCase("plan")) {
explainLevel = ExplainLevel.valueOf(ctx.level.getText().toUpperCase(Locale.ROOT));
} else {
explainLevel = parseExplainPlanType(ctx.planType());
}
}
return new ExplainCommand(explainLevel, inputPlan);
});
}
private LogicalPlan withQueryOrganization(LogicalPlan inputPlan, QueryOrganizationContext ctx) {
if (ctx == null) {
return inputPlan;
}
Optional<SortClauseContext> sortClauseContext = Optional.ofNullable(ctx.sortClause());
Optional<LimitClauseContext> limitClauseContext = Optional.ofNullable(ctx.limitClause());
LogicalPlan sort = withSort(inputPlan, sortClauseContext);
return withLimit(sort, limitClauseContext);
}
private LogicalPlan withSort(LogicalPlan input, Optional<SortClauseContext> sortCtx) {
return input.optionalMap(sortCtx, () -> {
List<OrderKey> orderKeys = visit(sortCtx.get().sortItem(), OrderKey.class);
return new LogicalSort<>(orderKeys, input);
});
}
private LogicalPlan withLimit(LogicalPlan input, Optional<LimitClauseContext> limitCtx) {
return input.optionalMap(limitCtx, () -> {
long limit = Long.parseLong(limitCtx.get().limit.getText());
if (limit < 0) {
throw new ParseException("Limit requires non-negative number", limitCtx.get());
}
long offset = 0;
Token offsetToken = limitCtx.get().offset;
if (offsetToken != null) {
offset = Long.parseLong(offsetToken.getText());
}
return new LogicalLimit<>(limit, offset, input);
});
}
private UnboundOneRowRelation withOneRowRelation(SelectColumnClauseContext selectCtx) {
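// A SELECT without a FROM clause, e.g. "SELECT 1 + 1 AS x", is modelled as a one-row relation
// that only carries its projections.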
return ParserUtils.withOrigin(selectCtx, () -> {
List<NamedExpression> projects = getNamedExpressions(selectCtx.namedExpressionSeq());
return new UnboundOneRowRelation(RelationUtil.newRelationId(), projects);
});
}
/**
* Add a regular (SELECT) query specification to a logical plan. The query specification
* is the core of the logical plan, this is where sourcing (FROM clause), projection (SELECT),
* aggregation (GROUP BY ... HAVING ...) and filtering (WHERE) takes place.
*
* <p>Note that query hints are ignored (both by the parser and the builder).
*/
private LogicalPlan withSelectQuerySpecification(
ParserRuleContext ctx,
LogicalPlan inputRelation,
SelectClauseContext selectClause,
Optional<WhereClauseContext> whereClause,
Optional<AggClauseContext> aggClause,
Optional<HavingClauseContext> havingClause) {
return ParserUtils.withOrigin(ctx, () -> {
LogicalPlan filter = withFilter(inputRelation, whereClause);
SelectColumnClauseContext selectColumnCtx = selectClause.selectColumnClause();
LogicalPlan aggregate = withAggregate(filter, selectColumnCtx, aggClause);
boolean isDistinct = (selectClause.DISTINCT() != null);
if (isDistinct && aggregate instanceof Aggregate) {
throw new ParseException("cannot combine SELECT DISTINCT with aggregate functions or GROUP BY",
selectClause);
}
if (!(aggregate instanceof Aggregate) && havingClause.isPresent()) {
LogicalPlan project;
if (selectColumnCtx.EXCEPT() != null) {
List<NamedExpression> expressions = getNamedExpressions(selectColumnCtx.namedExpressionSeq());
if (!expressions.stream().allMatch(UnboundSlot.class::isInstance)) {
throw new ParseException("only column name is supported in except clause", selectColumnCtx);
}
project = new LogicalProject<>(ImmutableList.of(new UnboundStar(ImmutableList.of())),
expressions, aggregate, isDistinct);
} else {
List<NamedExpression> projects = getNamedExpressions(selectColumnCtx.namedExpressionSeq());
project = new LogicalProject<>(projects, ImmutableList.of(), aggregate, isDistinct);
}
return new LogicalHaving<>(ExpressionUtils.extractConjunctionToSet(
getExpression((havingClause.get().booleanExpression()))), project);
} else {
LogicalPlan having = withHaving(aggregate, havingClause);
return withProjection(having, selectColumnCtx, aggClause, isDistinct);
}
});
}
/**
* Join one or more [[LogicalPlan]]s to the current logical plan.
*/
private LogicalPlan withJoinRelations(LogicalPlan input, RelationContext ctx) {
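// Each joinRelation is folded onto the running plan; e.g. "t1 LEFT SEMI JOIN t2 ON t1.id = t2.id"
// produces a LogicalJoin with JoinType.LEFT_SEMI_JOIN whose ON predicate is split into conjuncts.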
LogicalPlan last = input;
for (JoinRelationContext join : ctx.joinRelation()) {
JoinType joinType;
if (join.joinType().CROSS() != null) {
joinType = JoinType.CROSS_JOIN;
} else if (join.joinType().FULL() != null) {
joinType = JoinType.FULL_OUTER_JOIN;
} else if (join.joinType().SEMI() != null) {
if (join.joinType().LEFT() != null) {
joinType = JoinType.LEFT_SEMI_JOIN;
} else {
joinType = JoinType.RIGHT_SEMI_JOIN;
}
} else if (join.joinType().ANTI() != null) {
if (join.joinType().LEFT() != null) {
joinType = JoinType.LEFT_ANTI_JOIN;
} else {
joinType = JoinType.RIGHT_ANTI_JOIN;
}
} else if (join.joinType().LEFT() != null) {
joinType = JoinType.LEFT_OUTER_JOIN;
} else if (join.joinType().RIGHT() != null) {
joinType = JoinType.RIGHT_OUTER_JOIN;
} else if (join.joinType().INNER() != null) {
joinType = JoinType.INNER_JOIN;
} else if (join.joinCriteria() != null) {
joinType = JoinType.INNER_JOIN;
} else {
joinType = JoinType.CROSS_JOIN;
}
JoinHint joinHint = Optional.ofNullable(join.joinHint()).map(hintCtx -> {
String hint = typedVisit(join.joinHint());
if (JoinHint.JoinHintType.SHUFFLE.toString().equalsIgnoreCase(hint)) {
return JoinHint.SHUFFLE_RIGHT;
} else if (JoinHint.JoinHintType.BROADCAST.toString().equalsIgnoreCase(hint)) {
return JoinHint.BROADCAST_RIGHT;
} else {
throw new ParseException("Invalid join hint: " + hint, hintCtx);
}
}).orElse(JoinHint.NONE);
JoinCriteriaContext joinCriteria = join.joinCriteria();
Optional<Expression> condition = Optional.empty();
List<Expression> ids = null;
if (joinCriteria != null) {
if (join.joinType().CROSS() != null) {
throw new ParseException("Cross join can't be used with ON clause", joinCriteria);
}
if (joinCriteria.booleanExpression() != null) {
condition = Optional.ofNullable(getExpression(joinCriteria.booleanExpression()));
} else if (joinCriteria.USING() != null) {
ids = visitIdentifierList(joinCriteria.identifierList())
.stream().map(UnboundSlot::quoted)
.collect(ImmutableList.toImmutableList());
}
} else {
if (!joinType.isInnerOrCrossJoin()) {
throw new ParseException("on mustn't be empty except for cross/inner join", join);
}
}
if (ids == null) {
last = new LogicalJoin<>(joinType, ExpressionUtils.EMPTY_CONDITION,
condition.map(ExpressionUtils::extractConjunction)
.orElse(ExpressionUtils.EMPTY_CONDITION),
joinHint,
last,
plan(join.relationPrimary()));
} else {
last = new UsingJoin<>(joinType, last,
plan(join.relationPrimary()), ImmutableList.of(), ids, joinHint);
}
}
return last;
}
private LogicalPlan withSelectHint(LogicalPlan logicalPlan, SelectHintContext hintContext) {
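// e.g. a hint comment such as /*+ SET_VAR(query_timeout = 600) */ is collected as a SelectHint
// named "set_var" with the single parameter "query_timeout" -> "600".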
if (hintContext == null) {
return logicalPlan;
}
Map<String, SelectHint> hints = Maps.newLinkedHashMap();
for (HintStatementContext hintStatement : hintContext.hintStatements) {
String hintName = hintStatement.hintName.getText().toLowerCase(Locale.ROOT);
Map<String, Optional<String>> parameters = Maps.newLinkedHashMap();
for (HintAssignmentContext kv : hintStatement.parameters) {
String parameterName = kv.key.getText();
Optional<String> value = Optional.empty();
if (kv.constantValue != null) {
Literal literal = (Literal) visit(kv.constantValue);
value = Optional.ofNullable(literal.toLegacyLiteral().getStringValue());
} else if (kv.identifierValue != null) {
value = Optional.ofNullable(kv.identifierValue.getText());
}
parameters.put(parameterName, value);
}
hints.put(hintName, new SelectHint(hintName, parameters));
}
return new LogicalSelectHint<>(hints, logicalPlan);
}
@Override
public String visitBracketStyleHint(BracketStyleHintContext ctx) {
return ctx.identifier().getText();
}
@Override
public Object visitCommentStyleHint(CommentStyleHintContext ctx) {
return ctx.identifier().getText();
}
private LogicalPlan withProjection(LogicalPlan input, SelectColumnClauseContext selectCtx,
Optional<AggClauseContext> aggCtx, boolean isDistinct) {
return ParserUtils.withOrigin(selectCtx, () -> {
if (aggCtx.isPresent()) {
return input;
} else {
if (selectCtx.EXCEPT() != null) {
List<NamedExpression> expressions = getNamedExpressions(selectCtx.namedExpressionSeq());
if (!expressions.stream().allMatch(UnboundSlot.class::isInstance)) {
throw new ParseException("only column name is supported in except clause", selectCtx);
}
return new LogicalProject<>(ImmutableList.of(new UnboundStar(ImmutableList.of())),
expressions, input, isDistinct);
} else {
List<NamedExpression> projects = getNamedExpressions(selectCtx.namedExpressionSeq());
if (containsWindowExpressions(projects)) {
return new LogicalWindow<>(projects, input);
}
return new LogicalProject<>(projects, Collections.emptyList(), input, isDistinct);
}
}
});
}
private boolean containsWindowExpressions(List<NamedExpression> expressions) {
return expressions.stream().anyMatch(expr -> expr.anyMatch(Window.class::isInstance));
}
private LogicalPlan withFilter(LogicalPlan input, Optional<WhereClauseContext> whereCtx) {
return input.optionalMap(whereCtx, () ->
new LogicalFilter<>(ExpressionUtils.extractConjunctionToSet(
getExpression(whereCtx.get().booleanExpression())), input));
}
private LogicalPlan withAggregate(LogicalPlan input, SelectColumnClauseContext selectCtx,
Optional<AggClauseContext> aggCtx) {
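// GROUPING SETS, CUBE and ROLLUP are all normalized into a LogicalRepeat over explicit grouping
// sets, while a plain GROUP BY list becomes a LogicalAggregate.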
return input.optionalMap(aggCtx, () -> {
GroupingElementContext groupingElementContext = aggCtx.get().groupingElement();
List<NamedExpression> namedExpressions = getNamedExpressions(selectCtx.namedExpressionSeq());
if (groupingElementContext.GROUPING() != null) {
ImmutableList.Builder<List<Expression>> groupingSets = ImmutableList.builder();
for (GroupingSetContext groupingSetContext : groupingElementContext.groupingSet()) {
groupingSets.add(visit(groupingSetContext.expression(), Expression.class));
}
return new LogicalRepeat<>(groupingSets.build(), namedExpressions, input);
} else if (groupingElementContext.CUBE() != null) {
List<Expression> cubeExpressions = visit(groupingElementContext.expression(), Expression.class);
List<List<Expression>> groupingSets = ExpressionUtils.cubeToGroupingSets(cubeExpressions);
return new LogicalRepeat<>(groupingSets, namedExpressions, input);
} else if (groupingElementContext.ROLLUP() != null) {
List<Expression> rollupExpressions = visit(groupingElementContext.expression(), Expression.class);
List<List<Expression>> groupingSets = ExpressionUtils.rollupToGroupingSets(rollupExpressions);
return new LogicalRepeat<>(groupingSets, namedExpressions, input);
} else {
List<Expression> groupByExpressions = visit(groupingElementContext.expression(), Expression.class);
return new LogicalAggregate<>(groupByExpressions, namedExpressions, input);
}
});
}
private LogicalPlan withHaving(LogicalPlan input, Optional<HavingClauseContext> havingCtx) {
return input.optionalMap(havingCtx, () -> {
if (!(input instanceof Aggregate)) {
throw new ParseException("Having clause should be applied against an aggregation.", havingCtx.get());
}
return new LogicalHaving<>(ExpressionUtils.extractConjunctionToSet(
getExpression((havingCtx.get().booleanExpression()))), input);
});
}
/**
* match predicate type and generate different predicates.
*
* @param ctx PredicateContext
* @param valueExpression valueExpression
* @return Expression
*/
private Expression withPredicate(Expression valueExpression, PredicateContext ctx) {
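// e.g. "x BETWEEN 1 AND 10" becomes Between(x, 1, 10) and "a NOT IN (1, 2)" becomes
// Not(InPredicate(a, [1, 2])), since a trailing NOT wraps the generated predicate.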
return ParserUtils.withOrigin(ctx, () -> {
Expression outExpression;
switch (ctx.kind.getType()) {
case DorisParser.BETWEEN:
outExpression = new Between(
valueExpression,
getExpression(ctx.lower),
getExpression(ctx.upper)
);
break;
case DorisParser.LIKE:
outExpression = new Like(
valueExpression,
getExpression(ctx.pattern)
);
break;
case DorisParser.RLIKE:
case DorisParser.REGEXP:
outExpression = new Regexp(
valueExpression,
getExpression(ctx.pattern)
);
break;
case DorisParser.IN:
if (ctx.query() == null) {
outExpression = new InPredicate(
valueExpression,
withInList(ctx)
);
} else {
outExpression = new InSubquery(
valueExpression,
new ListQuery(typedVisit(ctx.query())),
ctx.NOT() != null
);
}
break;
case DorisParser.NULL:
outExpression = new IsNull(valueExpression);
break;
default:
throw new ParseException("Unsupported predicate type: " + ctx.kind.getText(), ctx);
}
return ctx.NOT() != null ? new Not(outExpression) : outExpression;
});
}
private List<NamedExpression> getNamedExpressions(NamedExpressionSeqContext namedCtx) {
return ParserUtils.withOrigin(namedCtx, () -> {
List<Expression> expressions = visit(namedCtx.namedExpression(), Expression.class);
return expressions.stream().map(expression -> {
if (expression instanceof NamedExpression) {
return (NamedExpression) expression;
} else {
return new UnboundAlias(expression);
}
}).collect(ImmutableList.toImmutableList());
});
}
@Override
public Expression visitSubqueryExpression(SubqueryExpressionContext subqueryExprCtx) {
return ParserUtils.withOrigin(subqueryExprCtx, () -> new ScalarSubquery(typedVisit(subqueryExprCtx.query())));
}
@Override
public Expression visitExist(ExistContext context) {
return ParserUtils.withOrigin(context, () -> new Exists(typedVisit(context.query()), false));
}
@Override
public Expression visitIsnull(IsnullContext context) {
return ParserUtils.withOrigin(context, () -> new IsNull(typedVisit(context.valueExpression())));
}
@Override
public Expression visitIs_not_null_pred(Is_not_null_predContext context) {
return ParserUtils.withOrigin(context, () -> new Not(new IsNull(typedVisit(context.valueExpression()))));
}
public List<Expression> withInList(PredicateContext ctx) {
return ctx.expression().stream().map(this::getExpression).collect(ImmutableList.toImmutableList());
}
@Override
public DecimalLiteral visitDecimalLiteral(DecimalLiteralContext ctx) {
return new DecimalLiteral(new BigDecimal(ctx.getText()));
}
private String parseTVFPropertyItem(TvfPropertyItemContext item) {
if (item.constant() != null) {
Object constant = visit(item.constant());
if (constant instanceof Literal && ((Literal) constant).isStringLiteral()) {
return ((Literal) constant).getStringValue();
}
}
return item.getText();
}
private ExplainLevel parseExplainPlanType(PlanTypeContext planTypeContext) {
if (planTypeContext == null || planTypeContext.ALL() != null) {
return ExplainLevel.ALL_PLAN;
}
if (planTypeContext.PHYSICAL() != null || planTypeContext.OPTIMIZED() != null) {
return ExplainLevel.OPTIMIZED_PLAN;
}
if (planTypeContext.REWRITTEN() != null || planTypeContext.LOGICAL() != null) {
return ExplainLevel.REWRITTEN_PLAN;
}
if (planTypeContext.ANALYZED() != null) {
return ExplainLevel.ANALYZED_PLAN;
}
if (planTypeContext.PARSED() != null) {
return ExplainLevel.PARSED_PLAN;
}
return ExplainLevel.ALL_PLAN;
}
@Override
public DataType visitPrimitiveDataType(PrimitiveDataTypeContext ctx) {
String dataType = ctx.identifier().getText().toLowerCase(Locale.ROOT);
List<String> l = Lists.newArrayList(dataType);
ctx.INTEGER_VALUE().stream().map(ParseTree::getText).forEach(l::add);
return DataType.convertPrimitiveFromStrings(l);
}
private Expression parseFunctionWithOrderKeys(String functionName, boolean isDistinct,
List<Expression> params, List<OrderKey> orderKeys, ParserRuleContext ctx) {
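// Only group_concat currently accepts ORDER BY inside the call, e.g.
// group_concat(name, ', ' ORDER BY id) becomes a GroupConcat over name and ', ' with id as an OrderExpression.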
if (functionName.equalsIgnoreCase("group_concat")) {
OrderExpression[] orderExpressions = orderKeys.stream()
.map(OrderExpression::new)
.toArray(OrderExpression[]::new);
if (params.size() == 1) {
return new GroupConcat(isDistinct, params.get(0), orderExpressions);
} else if (params.size() == 2) {
return new GroupConcat(isDistinct, params.get(0), params.get(1), orderExpressions);
} else {
throw new ParseException("group_concat requires one or two parameters: " + params, ctx);
}
}
throw new ParseException("Unsupported function with order expressions" + ctx.getText(), ctx);
}
} | class LogicalPlanBuilder extends DorisParserBaseVisitor<Object> {
@SuppressWarnings("unchecked")
protected <T> T typedVisit(ParseTree ctx) {
return (T) ctx.accept(this);
}
/**
* Override the default behavior for all visit methods. This will only return a non-null result
* when the context has only one child. This is done because there is no generic method to
* combine the results of the context children. In all other cases null is returned.
*/
@Override
public Object visitChildren(RuleNode node) {
if (node.getChildCount() == 1) {
return node.getChild(0).accept(this);
} else {
return null;
}
}
@Override
public LogicalPlan visitSingleStatement(SingleStatementContext ctx) {
return ParserUtils.withOrigin(ctx, () -> (LogicalPlan) visit(ctx.statement()));
}
@Override
public LogicalPlan visitStatementDefault(StatementDefaultContext ctx) {
LogicalPlan plan = plan(ctx.query());
return withExplain(plan, ctx.explain());
}
/**
* Visit multi-statements.
*/
@Override
public List<Pair<LogicalPlan, StatementContext>> visitMultiStatements(MultiStatementsContext ctx) {
List<Pair<LogicalPlan, StatementContext>> logicalPlans = Lists.newArrayList();
for (org.apache.doris.nereids.DorisParser.StatementContext statement : ctx.statement()) {
StatementContext statementContext = new StatementContext();
ConnectContext connectContext = ConnectContext.get();
if (connectContext != null) {
connectContext.setStatementContext(statementContext);
statementContext.setConnectContext(connectContext);
}
logicalPlans.add(Pair.of(
ParserUtils.withOrigin(ctx, () -> (LogicalPlan) visit(statement)), statementContext));
}
return logicalPlans;
}
/* ********************************************************************************************
* Plan parsing
* ******************************************************************************************** */
/**
* process lateral view, add a {@link org.apache.doris.nereids.trees.plans.logical.LogicalGenerate} on plan.
*/
private LogicalPlan withGenerate(LogicalPlan plan, LateralViewContext ctx) {
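// e.g. "LATERAL VIEW explode_split(tags, ',') tmp AS tag" produces a generator function
// explode_split(tags, ',') whose output column is exposed as the slot tmp.tag.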
if (ctx.LATERAL() == null) {
return plan;
}
String generateName = ctx.tableName.getText();
String columnName = ctx.columnName.getText();
String functionName = ctx.functionName.getText();
List<Expression> arguments = ctx.expression().stream()
.<Expression>map(this::typedVisit)
.collect(ImmutableList.toImmutableList());
Function unboundFunction = new UnboundFunction(functionName, arguments);
return new LogicalGenerate<>(ImmutableList.of(unboundFunction),
ImmutableList.of(new UnboundSlot(generateName, columnName)), plan);
}
/**
* process CTE and store the results in a logical plan node LogicalCTE
*/
private LogicalPlan withCte(LogicalPlan plan, CteContext ctx) {
if (ctx == null) {
return plan;
}
return new LogicalCTE<>((List) visit(ctx.aliasQuery(), LogicalSubQueryAlias.class), plan);
}
/**
* process CTE's alias queries and column aliases
*/
@Override
@Override
public Command visitCreateRowPolicy(CreateRowPolicyContext ctx) {
return new CreatePolicyCommand(PolicyTypeEnum.ROW, getExpression(ctx.booleanExpression()));
}
@Override
public LogicalPlan visitQuery(QueryContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
LogicalPlan query = plan(ctx.queryTerm());
query = withCte(query, ctx.cte());
return withQueryOrganization(query, ctx.queryOrganization());
});
}
@Override
public LogicalPlan visitSetOperation(SetOperationContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
LogicalPlan leftQuery = plan(ctx.left);
LogicalPlan rightQuery = plan(ctx.right);
Qualifier qualifier;
if (ctx.setQuantifier() == null || ctx.setQuantifier().DISTINCT() != null) {
qualifier = Qualifier.DISTINCT;
} else {
qualifier = Qualifier.ALL;
}
List<Plan> newChildren = new ImmutableList.Builder<Plan>()
.add(leftQuery)
.add(rightQuery)
.build();
if (ctx.UNION() != null) {
return new LogicalUnion(qualifier, newChildren);
} else if (ctx.EXCEPT() != null) {
return new LogicalExcept(qualifier, newChildren);
} else if (ctx.INTERSECT() != null) {
return new LogicalIntersect(qualifier, newChildren);
}
throw new ParseException("not support", ctx);
});
}
@Override
public LogicalPlan visitSubquery(SubqueryContext ctx) {
return ParserUtils.withOrigin(ctx, () -> plan(ctx.query()));
}
@Override
public LogicalPlan visitRegularQuerySpecification(RegularQuerySpecificationContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
SelectClauseContext selectCtx = ctx.selectClause();
LogicalPlan selectPlan;
if (ctx.fromClause() == null) {
SelectColumnClauseContext columnCtx = selectCtx.selectColumnClause();
if (columnCtx.EXCEPT() != null) {
throw new ParseException("select-except cannot be used in one row relation", selectCtx);
}
selectPlan = withOneRowRelation(columnCtx);
} else {
LogicalPlan relation = visitFromClause(ctx.fromClause());
selectPlan = withSelectQuerySpecification(
ctx, relation,
selectCtx,
Optional.ofNullable(ctx.whereClause()),
Optional.ofNullable(ctx.aggClause()),
Optional.ofNullable(ctx.havingClause())
);
}
selectPlan = withCte(selectPlan, ctx.cte());
selectPlan = withQueryOrganization(selectPlan, ctx.queryOrganization());
return withSelectHint(selectPlan, selectCtx.selectHint());
});
}
/**
* Create an aliased table reference. This is typically used in FROM clauses.
*/
private LogicalPlan withTableAlias(LogicalPlan plan, TableAliasContext ctx) {
if (ctx.strictIdentifier() == null) {
return plan;
}
return ParserUtils.withOrigin(ctx.strictIdentifier(), () -> {
String alias = ctx.strictIdentifier().getText();
if (null != ctx.identifierList()) {
throw new ParseException("Do not implemented", ctx);
}
return new LogicalSubQueryAlias<>(alias, plan);
});
}
private LogicalPlan withCheckPolicy(LogicalPlan plan) {
return new LogicalCheckPolicy<>(plan);
}
@Override
public LogicalPlan visitTableName(TableNameContext ctx) {
List<String> tableId = visitMultipartIdentifier(ctx.multipartIdentifier());
List<String> partitionNames = new ArrayList<>();
boolean isTempPart = false;
if (ctx.specifiedPartition() != null) {
isTempPart = ctx.specifiedPartition().TEMPORARY() != null;
if (ctx.specifiedPartition().identifier() != null) {
partitionNames.add(ctx.specifiedPartition().identifier().getText());
} else {
partitionNames.addAll(visitIdentifierList(ctx.specifiedPartition().identifierList()));
}
}
final List<String> relationHints;
if (ctx.relationHint() != null) {
relationHints = typedVisit(ctx.relationHint());
} else {
relationHints = ImmutableList.of();
}
LogicalPlan checkedRelation = withCheckPolicy(
new UnboundRelation(RelationUtil.newRelationId(), tableId, partitionNames, isTempPart, relationHints));
LogicalPlan plan = withTableAlias(checkedRelation, ctx.tableAlias());
for (LateralViewContext lateralViewContext : ctx.lateralView()) {
plan = withGenerate(plan, lateralViewContext);
}
return plan;
}
@Override
public LogicalPlan visitAliasedQuery(AliasedQueryContext ctx) {
if (ctx.tableAlias().getText().equals("")) {
throw new ParseException("Every derived table must have its own alias", ctx);
}
LogicalPlan plan = withTableAlias(visitQuery(ctx.query()), ctx.tableAlias());
for (LateralViewContext lateralViewContext : ctx.lateralView()) {
plan = withGenerate(plan, lateralViewContext);
}
return plan;
}
@Override
public LogicalPlan visitTableValuedFunction(TableValuedFunctionContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
String functionName = ctx.tvfName.getText();
Builder<String, String> map = ImmutableMap.builder();
for (TvfPropertyContext argument : ctx.properties) {
String key = parseTVFPropertyItem(argument.key);
String value = parseTVFPropertyItem(argument.value);
map.put(key, value);
}
LogicalPlan relation = new UnboundTVFRelation(RelationUtil.newRelationId(),
functionName, new TVFProperties(map.build()));
return withTableAlias(relation, ctx.tableAlias());
});
}
/**
* Create a star (i.e. all) expression; this selects all elements (in the specified object).
* Both un-targeted (global) and targeted aliases are supported.
*/
@Override
public Expression visitStar(StarContext ctx) {
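// "*" yields an UnboundStar with an empty target, while a qualified "db1.t1.*" carries [db1, t1].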
return ParserUtils.withOrigin(ctx, () -> {
final QualifiedNameContext qualifiedNameContext = ctx.qualifiedName();
List<String> target;
if (qualifiedNameContext != null) {
target = qualifiedNameContext.identifier()
.stream()
.map(RuleContext::getText)
.collect(ImmutableList.toImmutableList());
} else {
target = ImmutableList.of();
}
return new UnboundStar(target);
});
}
/**
* Create an aliased expression if an alias is specified. Both single and multi-aliases are
* supported.
*/
@Override
public Expression visitNamedExpression(NamedExpressionContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression expression = getExpression(ctx.expression());
if (ctx.errorCapturingIdentifier() != null) {
return new UnboundAlias(expression, ctx.errorCapturingIdentifier().getText());
} else if (ctx.STRING() != null) {
return new UnboundAlias(expression, ctx.STRING().getText()
.substring(1, ctx.STRING().getText().length() - 1));
} else {
return expression;
}
});
}
@Override
public Expression visitSystemVariable(SystemVariableContext ctx) {
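// Handles references such as @@sql_mode, @@session.sql_mode and @@global.sql_mode; when no scope
// is given, the DEFAULT scope is used to look the variable up.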
String name = ctx.identifier().getText();
SessionVariable sessionVariable = ConnectContext.get().getSessionVariable();
Literal literal = null;
if (ctx.kind == null) {
literal = VariableMgr.getLiteral(sessionVariable, name, SetType.DEFAULT);
} else if (ctx.kind.getType() == DorisParser.SESSION) {
literal = VariableMgr.getLiteral(sessionVariable, name, SetType.SESSION);
} else if (ctx.kind.getType() == DorisParser.GLOBAL) {
literal = VariableMgr.getLiteral(sessionVariable, name, SetType.GLOBAL);
}
if (literal == null) {
throw new ParseException("Unsupported system variable: " + ctx.getText(), ctx);
}
if (!Strings.isNullOrEmpty(name) && VariableVarConverters.hasConverter(name)) {
try {
Preconditions.checkArgument(literal instanceof IntegerLikeLiteral);
IntegerLikeLiteral integerLikeLiteral = (IntegerLikeLiteral) literal;
literal = new StringLiteral(VariableVarConverters.decode(name, integerLikeLiteral.getLongValue()));
} catch (DdlException e) {
throw new ParseException(e.getMessage(), ctx);
}
}
return literal;
}
@Override
public Expression visitUserVariable(UserVariableContext ctx) {
throw new ParseException("Unsupported user variable :" + ctx.getText(), ctx);
}
/**
* Create a comparison expression. This compares two expressions. The following comparison
* operators are supported:
* - Equal: '=' or '=='
* - Null-safe Equal: '<=>'
* - Not Equal: '<>' or '!='
* - Less than: '<'
* - Less than or Equal: '<='
* - Greater than: '>'
* - Greater than or Equal: '>='
*/
@Override
public Expression visitComparison(ComparisonContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression left = getExpression(ctx.left);
Expression right = getExpression(ctx.right);
TerminalNode operator = (TerminalNode) ctx.comparisonOperator().getChild(0);
switch (operator.getSymbol().getType()) {
case DorisParser.EQ:
return new EqualTo(left, right);
case DorisParser.NEQ:
return new Not(new EqualTo(left, right));
case DorisParser.LT:
return new LessThan(left, right);
case DorisParser.GT:
return new GreaterThan(left, right);
case DorisParser.LTE:
return new LessThanEqual(left, right);
case DorisParser.GTE:
return new GreaterThanEqual(left, right);
case DorisParser.NSEQ:
return new NullSafeEqual(left, right);
default:
throw new ParseException("Unsupported comparison expression: "
+ operator.getSymbol().getText(), ctx);
}
});
}
/**
* Create a not expression.
* format: NOT Expression
* for example:
* not 1
* not 1=1
*/
@Override
public Expression visitLogicalNot(LogicalNotContext ctx) {
return ParserUtils.withOrigin(ctx, () -> new Not(getExpression(ctx.booleanExpression())));
}
@Override
public Expression visitLogicalBinary(LogicalBinaryContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression left = getExpression(ctx.left);
Expression right = getExpression(ctx.right);
switch (ctx.operator.getType()) {
case DorisParser.LOGICALAND:
case DorisParser.AND:
return new And(left, right);
case DorisParser.OR:
return new Or(left, right);
default:
throw new ParseException("Unsupported logical binary type: " + ctx.operator.getText(), ctx);
}
});
}
/**
* Create a predicated expression. A predicated expression is a normal expression with a
* predicate attached to it, for example:
* {{{
* a + 1 IS NULL
* }}}
*/
@Override
public Expression visitPredicated(PredicatedContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression e = getExpression(ctx.valueExpression());
return ctx.predicate() == null ? e : withPredicate(e, ctx.predicate());
});
}
@Override
public Expression visitArithmeticUnary(ArithmeticUnaryContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression e = typedVisit(ctx.valueExpression());
switch (ctx.operator.getType()) {
case DorisParser.PLUS:
return e;
case DorisParser.MINUS:
IntegerLiteral zero = new IntegerLiteral(0);
return new Subtract(zero, e);
case DorisParser.TILDE:
return new BitNot(e);
default:
throw new ParseException("Unsupported arithmetic unary type: " + ctx.operator.getText(), ctx);
}
});
}
@Override
public Expression visitBitOperation(BitOperationContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression left = getExpression(ctx.left);
Expression right = getExpression(ctx.right);
if (ctx.operator.getType() == DorisParser.BITAND) {
return new BitAnd(left, right);
} else if (ctx.operator.getType() == DorisParser.BITOR) {
return new BitOr(left, right);
} else if (ctx.operator.getType() == DorisParser.BITXOR) {
return new BitXor(left, right);
}
throw new ParseException(" not supported", ctx);
});
}
@Override
public Expression visitArithmeticBinary(ArithmeticBinaryContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression left = getExpression(ctx.left);
Expression right = getExpression(ctx.right);
int type = ctx.operator.getType();
if (left instanceof Interval) {
if (type != DorisParser.PLUS) {
throw new ParseException("Only supported: " + Operator.ADD, ctx);
}
Interval interval = (Interval) left;
return new TimestampArithmetic(Operator.ADD, right, interval.value(), interval.timeUnit(), true);
}
if (right instanceof Interval) {
Operator op;
if (type == DorisParser.PLUS) {
op = Operator.ADD;
} else if (type == DorisParser.MINUS) {
op = Operator.SUBTRACT;
} else {
throw new ParseException("Only supported: " + Operator.ADD + " and " + Operator.SUBTRACT, ctx);
}
Interval interval = (Interval) right;
return new TimestampArithmetic(op, left, interval.value(), interval.timeUnit(), false);
}
return ParserUtils.withOrigin(ctx, () -> {
switch (type) {
case DorisParser.ASTERISK:
return new Multiply(left, right);
case DorisParser.SLASH:
return new Divide(left, right);
case DorisParser.PERCENT:
return new Mod(left, right);
case DorisParser.PLUS:
return new Add(left, right);
case DorisParser.MINUS:
return new Subtract(left, right);
case DorisParser.DIV:
return new IntegralDivide(left, right);
case DorisParser.HAT:
return new BitXor(left, right);
case DorisParser.PIPE:
return new BitOr(left, right);
case DorisParser.AMPERSAND:
return new BitAnd(left, right);
default:
throw new ParseException(
"Unsupported arithmetic binary type: " + ctx.operator.getText(), ctx);
}
});
});
}
@Override
public Expression visitTimestampdiff(TimestampdiffContext ctx) {
Expression start = (Expression) visit(ctx.startTimestamp);
Expression end = (Expression) visit(ctx.endTimestamp);
String unit = ctx.unit.getText();
if ("YEAR".equalsIgnoreCase(unit)) {
return new YearsDiff(end, start);
} else if ("MONTH".equalsIgnoreCase(unit)) {
return new MonthsDiff(end, start);
} else if ("WEEK".equalsIgnoreCase(unit)) {
return new WeeksDiff(end, start);
} else if ("DAY".equalsIgnoreCase(unit)) {
return new DaysDiff(end, start);
} else if ("HOUR".equalsIgnoreCase(unit)) {
return new HoursDiff(end, start);
} else if ("MINUTE".equalsIgnoreCase(unit)) {
return new MinutesDiff(end, start);
} else if ("SECOND".equalsIgnoreCase(unit)) {
return new SecondsDiff(end, start);
}
throw new ParseException("Unsupported time stamp diff time unit: " + unit
+ ", supported time unit: YEAR/MONTH/WEEK/DAY/HOUR/MINUTE/SECOND", ctx);
}
@Override
public Expression visitTimestampadd(TimestampaddContext ctx) {
Expression start = (Expression) visit(ctx.startTimestamp);
Expression end = (Expression) visit(ctx.endTimestamp);
String unit = ctx.unit.getText();
if ("YEAR".equalsIgnoreCase(unit)) {
return new YearsAdd(end, start);
} else if ("MONTH".equalsIgnoreCase(unit)) {
return new MonthsAdd(end, start);
} else if ("WEEK".equalsIgnoreCase(unit)) {
return new WeeksAdd(end, start);
} else if ("DAY".equalsIgnoreCase(unit)) {
return new DaysAdd(end, start);
} else if ("HOUR".equalsIgnoreCase(unit)) {
return new HoursAdd(end, start);
} else if ("MINUTE".equalsIgnoreCase(unit)) {
return new MinutesAdd(end, start);
} else if ("SECOND".equalsIgnoreCase(unit)) {
return new SecondsAdd(end, start);
}
throw new ParseException("Unsupported time stamp add time unit: " + unit
+ ", supported time unit: YEAR/MONTH/WEEK/DAY/HOUR/MINUTE/SECOND", ctx);
}
@Override
public Expression visitDate_add(Date_addContext ctx) {
Expression timeStamp = (Expression) visit(ctx.timestamp);
Expression amount = (Expression) visit(ctx.unitsAmount);
if (ctx.unit == null) {
return new DaysAdd(timeStamp, amount);
}
if ("Year".equalsIgnoreCase(ctx.unit.getText())) {
return new YearsAdd(timeStamp, amount);
} else if ("MONTH".equalsIgnoreCase(ctx.unit.getText())) {
return new MonthsAdd(timeStamp, amount);
} else if ("WEEK".equalsIgnoreCase(ctx.unit.getText())) {
return new WeeksAdd(timeStamp, amount);
} else if ("DAY".equalsIgnoreCase(ctx.unit.getText())) {
return new DaysAdd(timeStamp, amount);
} else if ("Hour".equalsIgnoreCase(ctx.unit.getText())) {
return new HoursAdd(timeStamp, amount);
} else if ("Minute".equalsIgnoreCase(ctx.unit.getText())) {
return new MinutesAdd(timeStamp, amount);
} else if ("Second".equalsIgnoreCase(ctx.unit.getText())) {
return new SecondsAdd(timeStamp, amount);
}
throw new ParseException("Unsupported time unit: " + ctx.unit
+ ", supported time unit: YEAR/MONTH/DAY/HOUR/MINUTE/SECOND", ctx);
}
@Override
public Expression visitDate_sub(Date_subContext ctx) {
Expression timeStamp = (Expression) visit(ctx.timestamp);
Expression amount = (Expression) visit(ctx.unitsAmount);
if (ctx.unit == null) {
return new DaysSub(timeStamp, amount);
}
if ("Year".equalsIgnoreCase(ctx.unit.getText())) {
return new YearsSub(timeStamp, amount);
} else if ("MONTH".equalsIgnoreCase(ctx.unit.getText())) {
return new MonthsSub(timeStamp, amount);
} else if ("WEEK".equalsIgnoreCase(ctx.unit.getText())) {
return new WeeksSub(timeStamp, amount);
} else if ("DAY".equalsIgnoreCase(ctx.unit.getText())) {
return new DaysSub(timeStamp, amount);
} else if ("Hour".equalsIgnoreCase(ctx.unit.getText())) {
return new HoursSub(timeStamp, amount);
} else if ("Minute".equalsIgnoreCase(ctx.unit.getText())) {
return new MinutesSub(timeStamp, amount);
} else if ("Second".equalsIgnoreCase(ctx.unit.getText())) {
return new SecondsSub(timeStamp, amount);
}
throw new ParseException("Unsupported time unit: " + ctx.unit
+ ", supported time unit: YEAR/MONTH/DAY/HOUR/MINUTE/SECOND", ctx);
}
@Override
public Expression visitDoublePipes(DorisParser.DoublePipesContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression left = getExpression(ctx.left);
Expression right = getExpression(ctx.right);
if (ConnectContext.get().getSessionVariable().getSqlMode() == SqlModeHelper.MODE_PIPES_AS_CONCAT) {
return new UnboundFunction("concat", Lists.newArrayList(left, right));
} else {
return new Or(left, right);
}
});
}
/**
* Create a value based [[CaseWhen]] expression. This has the following SQL form:
* {{{
* CASE [expression]
* WHEN [value] THEN [expression]
* ...
* ELSE [expression]
* END
* }}}
*/
@Override
public Expression visitSimpleCase(DorisParser.SimpleCaseContext context) {
Expression e = getExpression(context.value);
List<WhenClause> whenClauses = context.whenClause().stream()
.map(w -> new WhenClause(new EqualTo(e, getExpression(w.condition)), getExpression(w.result)))
.collect(ImmutableList.toImmutableList());
if (context.elseExpression == null) {
return new CaseWhen(whenClauses);
}
return new CaseWhen(whenClauses, getExpression(context.elseExpression));
}
/**
* Create a condition based [[CaseWhen]] expression. This has the following SQL syntax:
* {{{
* CASE
* WHEN [predicate] THEN [expression]
* ...
* ELSE [expression]
* END
* }}}
*
* @param context the parse tree
*/
@Override
public Expression visitSearchedCase(DorisParser.SearchedCaseContext context) {
List<WhenClause> whenClauses = context.whenClause().stream()
.map(w -> new WhenClause(getExpression(w.condition), getExpression(w.result)))
.collect(ImmutableList.toImmutableList());
if (context.elseExpression == null) {
return new CaseWhen(whenClauses);
}
return new CaseWhen(whenClauses, getExpression(context.elseExpression));
}
@Override
public Expression visitCast(DorisParser.CastContext ctx) {
return ParserUtils.withOrigin(ctx, () ->
new Cast(getExpression(ctx.expression()), typedVisit(ctx.dataType())));
}
@Override
public UnboundFunction visitExtract(DorisParser.ExtractContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
String functionName = ctx.field.getText();
return new UnboundFunction(functionName, false,
Collections.singletonList(getExpression(ctx.source)));
});
}
@Override
public Expression visitFunctionCall(DorisParser.FunctionCallContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
String functionName = ctx.functionIdentifier().getText();
boolean isDistinct = ctx.DISTINCT() != null;
List<Expression> params = visit(ctx.expression(), Expression.class);
List<OrderKey> orderKeys = visit(ctx.sortItem(), OrderKey.class);
if (!orderKeys.isEmpty()) {
return parseFunctionWithOrderKeys(functionName, isDistinct, params, orderKeys, ctx);
}
List<UnboundStar> unboundStars = ExpressionUtils.collectAll(params, UnboundStar.class::isInstance);
if (unboundStars.size() > 0) {
if (functionName.equalsIgnoreCase("count")) {
if (unboundStars.size() > 1) {
throw new ParseException(
"'*' can only be used once in conjunction with COUNT: " + functionName, ctx);
}
if (!unboundStars.get(0).getQualifier().isEmpty()) {
throw new ParseException("'*' can not has qualifier: " + unboundStars.size(), ctx);
}
if (ctx.windowSpec() != null) {
throw new ParseException(
"COUNT(*) isn't supported as window function; can use COUNT(col)", ctx);
}
return new Count();
}
throw new ParseException("'*' can only be used in conjunction with COUNT: " + functionName, ctx);
} else {
UnboundFunction function = new UnboundFunction(functionName, isDistinct, params);
if (ctx.windowSpec() != null) {
if (isDistinct) {
throw new ParseException("DISTINCT not allowed in analytic function: " + functionName, ctx);
}
return withWindowSpec(ctx.windowSpec(), function);
}
return function;
}
});
}
/**
* deal with window function definition
*/
private WindowExpression withWindowSpec(WindowSpecContext ctx, Expression function) {
List<Expression> partitionKeyList = Lists.newArrayList();
if (ctx.partitionClause() != null) {
partitionKeyList = visit(ctx.partitionClause().expression(), Expression.class);
}
List<OrderExpression> orderKeyList = Lists.newArrayList();
if (ctx.sortClause() != null) {
orderKeyList = visit(ctx.sortClause().sortItem(), OrderKey.class).stream()
.map(orderKey -> new OrderExpression(orderKey))
.collect(Collectors.toList());
}
if (ctx.windowFrame() != null) {
return new WindowExpression(function, partitionKeyList, orderKeyList, withWindowFrame(ctx.windowFrame()));
}
return new WindowExpression(function, partitionKeyList, orderKeyList);
}
/**
* deal with optional expressions
*/
private <T, C> Optional<C> optionalVisit(T ctx, Supplier<C> func) {
return Optional.ofNullable(ctx).map(a -> func.get());
}
/**
* deal with window frame
*/
private WindowFrame withWindowFrame(WindowFrameContext ctx) {
WindowFrame.FrameUnitsType frameUnitsType = WindowFrame.FrameUnitsType.valueOf(
ctx.frameUnits().getText().toUpperCase());
WindowFrame.FrameBoundary leftBoundary = withFrameBound(ctx.start);
if (ctx.end != null) {
WindowFrame.FrameBoundary rightBoundary = withFrameBound(ctx.end);
return new WindowFrame(frameUnitsType, leftBoundary, rightBoundary);
}
return new WindowFrame(frameUnitsType, leftBoundary);
}
private WindowFrame.FrameBoundary withFrameBound(DorisParser.FrameBoundaryContext ctx) {
Optional<Expression> expression = Optional.empty();
if (ctx.expression() != null) {
expression = Optional.of(getExpression(ctx.expression()));
if (!expression.get().isLiteral()) {
throw new ParseException("Unsupported expression in WindowFrame : " + expression, ctx);
}
}
WindowFrame.FrameBoundType frameBoundType = null;
switch (ctx.boundType.getType()) {
case DorisParser.PRECEDING:
if (ctx.UNBOUNDED() != null) {
frameBoundType = WindowFrame.FrameBoundType.UNBOUNDED_PRECEDING;
} else {
frameBoundType = WindowFrame.FrameBoundType.PRECEDING;
}
break;
case DorisParser.CURRENT:
frameBoundType = WindowFrame.FrameBoundType.CURRENT_ROW;
break;
case DorisParser.FOLLOWING:
if (ctx.UNBOUNDED() != null) {
frameBoundType = WindowFrame.FrameBoundType.UNBOUNDED_FOLLOWING;
} else {
frameBoundType = WindowFrame.FrameBoundType.FOLLOWING;
}
break;
default:
}
return new WindowFrame.FrameBoundary(expression, frameBoundType);
}
@Override
public Expression visitInterval(IntervalContext ctx) {
return new Interval(getExpression(ctx.value), visitUnitIdentifier(ctx.unit));
}
@Override
public String visitUnitIdentifier(UnitIdentifierContext ctx) {
return ctx.getText();
}
@Override
public Expression visitTypeConstructor(TypeConstructorContext ctx) {
String value = ctx.STRING().getText();
value = value.substring(1, value.length() - 1);
String type = ctx.type.getText().toUpperCase();
switch (type) {
case "DATE":
return new DateLiteral(value);
case "TIMESTAMP":
return new DateTimeLiteral(value);
case "DATEV2":
return new DateV2Literal(value);
default:
throw new ParseException("Unsupported data type : " + type, ctx);
}
}
@Override
public Expression visitDereference(DereferenceContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression e = getExpression(ctx.base);
if (e instanceof UnboundSlot) {
UnboundSlot unboundAttribute = (UnboundSlot) e;
List<String> nameParts = Lists.newArrayList(unboundAttribute.getNameParts());
nameParts.add(ctx.fieldName.getText());
return new UnboundSlot(nameParts);
} else {
throw new ParseException("Unsupported dereference expression: " + ctx.getText(), ctx);
}
});
}
@Override
public UnboundSlot visitColumnReference(ColumnReferenceContext ctx) {
return UnboundSlot.quoted(ctx.getText());
}
/**
* Create a NULL literal expression.
*/
@Override
public Expression visitNullLiteral(NullLiteralContext ctx) {
return new NullLiteral();
}
@Override
public Literal visitBooleanLiteral(BooleanLiteralContext ctx) {
Boolean b = Boolean.valueOf(ctx.getText());
return BooleanLiteral.of(b);
}
@Override
public Literal visitIntegerLiteral(IntegerLiteralContext ctx) {
BigInteger bigInt = new BigInteger(ctx.getText());
if (BigInteger.valueOf(bigInt.byteValue()).equals(bigInt)) {
return new TinyIntLiteral(bigInt.byteValue());
} else if (BigInteger.valueOf(bigInt.shortValue()).equals(bigInt)) {
return new SmallIntLiteral(bigInt.shortValue());
} else if (BigInteger.valueOf(bigInt.intValue()).equals(bigInt)) {
return new IntegerLiteral(bigInt.intValue());
} else if (BigInteger.valueOf(bigInt.longValue()).equals(bigInt)) {
return new BigIntLiteral(bigInt.longValueExact());
} else {
return new LargeIntLiteral(bigInt);
}
}
@Override
public Literal visitStringLiteral(StringLiteralContext ctx) {
String txt = ctx.STRING().getText();
String s = escapeBackSlash(txt.substring(1, txt.length() - 1));
return new VarcharLiteral(s);
}
private String escapeBackSlash(String str) {
StringBuilder sb = new StringBuilder();
int strLen = str.length();
for (int i = 0; i < strLen; ++i) {
char c = str.charAt(i);
if (c == '\\' && (i + 1) < strLen) {
switch (str.charAt(i + 1)) {
case 'n':
sb.append('\n');
break;
case 't':
sb.append('\t');
break;
case 'r':
sb.append('\r');
break;
case 'b':
sb.append('\b');
break;
case '0':
sb.append('\0');
break;
case 'Z':
sb.append('\032');
break;
case '_':
case '%':
sb.append('\\');
sb.append(str.charAt(i + 1));
break;
default:
sb.append(str.charAt(i + 1));
break;
}
i++;
} else {
sb.append(c);
}
}
return sb.toString();
}
@Override
public Expression visitParenthesizedExpression(ParenthesizedExpressionContext ctx) {
return getExpression(ctx.expression());
}
@Override
public List<Expression> visitNamedExpressionSeq(NamedExpressionSeqContext namedCtx) {
return visit(namedCtx.namedExpression(), Expression.class);
}
@Override
public LogicalPlan visitRelation(RelationContext ctx) {
return plan(ctx.relationPrimary());
}
@Override
public LogicalPlan visitFromClause(FromClauseContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
LogicalPlan left = null;
for (RelationContext relation : ctx.relation()) {
LogicalPlan right = visitRelation(relation);
left = (left == null) ? right :
new LogicalJoin<>(
JoinType.CROSS_JOIN,
ExpressionUtils.EMPTY_CONDITION,
ExpressionUtils.EMPTY_CONDITION,
JoinHint.NONE,
left,
right);
left = withJoinRelations(left, relation);
}
return left;
});
}
/* ********************************************************************************************
* Table Identifier parsing
* ******************************************************************************************** */
@Override
public List<String> visitMultipartIdentifier(MultipartIdentifierContext ctx) {
return ctx.parts.stream()
.map(RuleContext::getText)
.collect(ImmutableList.toImmutableList());
}
/**
* Create a Sequence of Strings for a parenthesis enclosed alias list.
*/
@Override
public List<String> visitIdentifierList(IdentifierListContext ctx) {
return visitIdentifierSeq(ctx.identifierSeq());
}
/**
* Create a Sequence of Strings for an identifier list.
*/
@Override
public List<String> visitIdentifierSeq(IdentifierSeqContext ctx) {
return ctx.ident.stream()
.map(RuleContext::getText)
.collect(ImmutableList.toImmutableList());
}
/**
* get OrderKey.
*
* @param ctx SortItemContext
* @return SortItems
*/
@Override
public OrderKey visitSortItem(SortItemContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
boolean isAsc = ctx.DESC() == null;
boolean isNullFirst = ctx.FIRST() != null || (ctx.LAST() == null && isAsc);
Expression expression = typedVisit(ctx.expression());
return new OrderKey(expression, isAsc, isNullFirst);
});
}
private <T> List<T> visit(List<? extends ParserRuleContext> contexts, Class<T> clazz) {
return contexts.stream()
.map(this::visit)
.map(clazz::cast)
.collect(ImmutableList.toImmutableList());
}
private LogicalPlan plan(ParserRuleContext tree) {
return (LogicalPlan) tree.accept(this);
}
/* ********************************************************************************************
* Expression parsing
* ******************************************************************************************** */
/**
* Create an expression from the given context. This method just passes the context on to the
* visitor and only takes care of typing (We assume that the visitor returns an Expression here).
*/
private Expression getExpression(ParserRuleContext ctx) {
return typedVisit(ctx);
}
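    // A plan type (parsed/analyzed/rewritten/optimized/all) is only allowed together with
    // "explain plan"; any other level keyword is mapped directly to an ExplainLevel.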
private LogicalPlan withExplain(LogicalPlan inputPlan, ExplainContext ctx) {
if (ctx == null) {
return inputPlan;
}
return ParserUtils.withOrigin(ctx, () -> {
ExplainLevel explainLevel = ExplainLevel.NORMAL;
if (ctx.planType() != null) {
if (ctx.level == null || !ctx.level.getText().equalsIgnoreCase("plan")) {
throw new ParseException("Only explain plan can use plan type: " + ctx.planType().getText(), ctx);
}
}
if (ctx.level != null) {
if (!ctx.level.getText().equalsIgnoreCase("plan")) {
explainLevel = ExplainLevel.valueOf(ctx.level.getText().toUpperCase(Locale.ROOT));
} else {
explainLevel = parseExplainPlanType(ctx.planType());
}
}
return new ExplainCommand(explainLevel, inputPlan);
});
}
private LogicalPlan withQueryOrganization(LogicalPlan inputPlan, QueryOrganizationContext ctx) {
if (ctx == null) {
return inputPlan;
}
Optional<SortClauseContext> sortClauseContext = Optional.ofNullable(ctx.sortClause());
Optional<LimitClauseContext> limitClauseContext = Optional.ofNullable(ctx.limitClause());
LogicalPlan sort = withSort(inputPlan, sortClauseContext);
return withLimit(sort, limitClauseContext);
}
private LogicalPlan withSort(LogicalPlan input, Optional<SortClauseContext> sortCtx) {
return input.optionalMap(sortCtx, () -> {
List<OrderKey> orderKeys = visit(sortCtx.get().sortItem(), OrderKey.class);
return new LogicalSort<>(orderKeys, input);
});
}
private LogicalPlan withLimit(LogicalPlan input, Optional<LimitClauseContext> limitCtx) {
return input.optionalMap(limitCtx, () -> {
long limit = Long.parseLong(limitCtx.get().limit.getText());
if (limit < 0) {
throw new ParseException("Limit requires non-negative number", limitCtx.get());
}
long offset = 0;
Token offsetToken = limitCtx.get().offset;
if (offsetToken != null) {
offset = Long.parseLong(offsetToken.getText());
}
return new LogicalLimit<>(limit, offset, input);
});
}
private UnboundOneRowRelation withOneRowRelation(SelectColumnClauseContext selectCtx) {
return ParserUtils.withOrigin(selectCtx, () -> {
List<NamedExpression> projects = getNamedExpressions(selectCtx.namedExpressionSeq());
return new UnboundOneRowRelation(RelationUtil.newRelationId(), projects);
});
}
/**
* Add a regular (SELECT) query specification to a logical plan. The query specification
* is the core of the logical plan, this is where sourcing (FROM clause), projection (SELECT),
* aggregation (GROUP BY ... HAVING ...) and filtering (WHERE) takes place.
*
* <p>Note that query hints are ignored (both by the parser and the builder).
*/
private LogicalPlan withSelectQuerySpecification(
ParserRuleContext ctx,
LogicalPlan inputRelation,
SelectClauseContext selectClause,
Optional<WhereClauseContext> whereClause,
Optional<AggClauseContext> aggClause,
Optional<HavingClauseContext> havingClause) {
return ParserUtils.withOrigin(ctx, () -> {
LogicalPlan filter = withFilter(inputRelation, whereClause);
SelectColumnClauseContext selectColumnCtx = selectClause.selectColumnClause();
LogicalPlan aggregate = withAggregate(filter, selectColumnCtx, aggClause);
boolean isDistinct = (selectClause.DISTINCT() != null);
if (isDistinct && aggregate instanceof Aggregate) {
throw new ParseException("cannot combine SELECT DISTINCT with aggregate functions or GROUP BY",
selectClause);
}
if (!(aggregate instanceof Aggregate) && havingClause.isPresent()) {
LogicalPlan project;
if (selectColumnCtx.EXCEPT() != null) {
List<NamedExpression> expressions = getNamedExpressions(selectColumnCtx.namedExpressionSeq());
if (!expressions.stream().allMatch(UnboundSlot.class::isInstance)) {
throw new ParseException("only column name is supported in except clause", selectColumnCtx);
}
project = new LogicalProject<>(ImmutableList.of(new UnboundStar(ImmutableList.of())),
expressions, aggregate, isDistinct);
} else {
List<NamedExpression> projects = getNamedExpressions(selectColumnCtx.namedExpressionSeq());
project = new LogicalProject<>(projects, ImmutableList.of(), aggregate, isDistinct);
}
return new LogicalHaving<>(ExpressionUtils.extractConjunctionToSet(
getExpression((havingClause.get().booleanExpression()))), project);
} else {
LogicalPlan having = withHaving(aggregate, havingClause);
return withProjection(having, selectColumnCtx, aggClause, isDistinct);
}
});
}
/**
     * Join one or more [[LogicalPlan]]s to the current logical plan.
*/
private LogicalPlan withJoinRelations(LogicalPlan input, RelationContext ctx) {
LogicalPlan last = input;
for (JoinRelationContext join : ctx.joinRelation()) {
JoinType joinType;
if (join.joinType().CROSS() != null) {
joinType = JoinType.CROSS_JOIN;
} else if (join.joinType().FULL() != null) {
joinType = JoinType.FULL_OUTER_JOIN;
} else if (join.joinType().SEMI() != null) {
if (join.joinType().LEFT() != null) {
joinType = JoinType.LEFT_SEMI_JOIN;
} else {
joinType = JoinType.RIGHT_SEMI_JOIN;
}
} else if (join.joinType().ANTI() != null) {
if (join.joinType().LEFT() != null) {
joinType = JoinType.LEFT_ANTI_JOIN;
} else {
joinType = JoinType.RIGHT_ANTI_JOIN;
}
} else if (join.joinType().LEFT() != null) {
joinType = JoinType.LEFT_OUTER_JOIN;
} else if (join.joinType().RIGHT() != null) {
joinType = JoinType.RIGHT_OUTER_JOIN;
} else if (join.joinType().INNER() != null) {
joinType = JoinType.INNER_JOIN;
} else if (join.joinCriteria() != null) {
joinType = JoinType.INNER_JOIN;
} else {
joinType = JoinType.CROSS_JOIN;
}
JoinHint joinHint = Optional.ofNullable(join.joinHint()).map(hintCtx -> {
String hint = typedVisit(join.joinHint());
if (JoinHint.JoinHintType.SHUFFLE.toString().equalsIgnoreCase(hint)) {
return JoinHint.SHUFFLE_RIGHT;
} else if (JoinHint.JoinHintType.BROADCAST.toString().equalsIgnoreCase(hint)) {
return JoinHint.BROADCAST_RIGHT;
} else {
throw new ParseException("Invalid join hint: " + hint, hintCtx);
}
}).orElse(JoinHint.NONE);
JoinCriteriaContext joinCriteria = join.joinCriteria();
Optional<Expression> condition = Optional.empty();
List<Expression> ids = null;
if (joinCriteria != null) {
if (join.joinType().CROSS() != null) {
throw new ParseException("Cross join can't be used with ON clause", joinCriteria);
}
if (joinCriteria.booleanExpression() != null) {
condition = Optional.ofNullable(getExpression(joinCriteria.booleanExpression()));
} else if (joinCriteria.USING() != null) {
ids = visitIdentifierList(joinCriteria.identifierList())
.stream().map(UnboundSlot::quoted)
.collect(ImmutableList.toImmutableList());
}
} else {
if (!joinType.isInnerOrCrossJoin()) {
throw new ParseException("on mustn't be empty except for cross/inner join", join);
}
}
if (ids == null) {
last = new LogicalJoin<>(joinType, ExpressionUtils.EMPTY_CONDITION,
condition.map(ExpressionUtils::extractConjunction)
.orElse(ExpressionUtils.EMPTY_CONDITION),
joinHint,
last,
plan(join.relationPrimary()));
} else {
last = new UsingJoin<>(joinType, last,
plan(join.relationPrimary()), ImmutableList.of(), ids, joinHint);
}
}
return last;
}
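    // Each hint statement becomes a SelectHint (name plus optional key/value parameters) and the
    // plan is wrapped in a LogicalSelectHint carrying all of them.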
private LogicalPlan withSelectHint(LogicalPlan logicalPlan, SelectHintContext hintContext) {
if (hintContext == null) {
return logicalPlan;
}
Map<String, SelectHint> hints = Maps.newLinkedHashMap();
for (HintStatementContext hintStatement : hintContext.hintStatements) {
String hintName = hintStatement.hintName.getText().toLowerCase(Locale.ROOT);
Map<String, Optional<String>> parameters = Maps.newLinkedHashMap();
for (HintAssignmentContext kv : hintStatement.parameters) {
String parameterName = kv.key.getText();
Optional<String> value = Optional.empty();
if (kv.constantValue != null) {
Literal literal = (Literal) visit(kv.constantValue);
value = Optional.ofNullable(literal.toLegacyLiteral().getStringValue());
} else if (kv.identifierValue != null) {
value = Optional.ofNullable(kv.identifierValue.getText());
}
parameters.put(parameterName, value);
}
hints.put(hintName, new SelectHint(hintName, parameters));
}
return new LogicalSelectHint<>(hints, logicalPlan);
}
@Override
public String visitBracketJoinHint(BracketJoinHintContext ctx) {
return ctx.identifier().getText();
}
@Override
public String visitCommentJoinHint(CommentJoinHintContext ctx) {
return ctx.identifier().getText();
}
@Override
public List<String> visitBracketRelationHint(BracketRelationHintContext ctx) {
return ctx.identifier().stream()
.map(RuleContext::getText)
.collect(ImmutableList.toImmutableList());
}
@Override
public Object visitCommentRelationHint(CommentRelationHintContext ctx) {
return ctx.identifier().stream()
.map(RuleContext::getText)
.collect(ImmutableList.toImmutableList());
}
private LogicalPlan withProjection(LogicalPlan input, SelectColumnClauseContext selectCtx,
Optional<AggClauseContext> aggCtx, boolean isDistinct) {
return ParserUtils.withOrigin(selectCtx, () -> {
if (aggCtx.isPresent()) {
return input;
} else {
if (selectCtx.EXCEPT() != null) {
List<NamedExpression> expressions = getNamedExpressions(selectCtx.namedExpressionSeq());
if (!expressions.stream().allMatch(UnboundSlot.class::isInstance)) {
throw new ParseException("only column name is supported in except clause", selectCtx);
}
return new LogicalProject<>(ImmutableList.of(new UnboundStar(ImmutableList.of())),
expressions, input, isDistinct);
} else {
List<NamedExpression> projects = getNamedExpressions(selectCtx.namedExpressionSeq());
return new LogicalProject<>(projects, Collections.emptyList(), input, isDistinct);
}
}
});
}
private LogicalPlan withFilter(LogicalPlan input, Optional<WhereClauseContext> whereCtx) {
return input.optionalMap(whereCtx, () ->
new LogicalFilter<>(ExpressionUtils.extractConjunctionToSet(
getExpression(whereCtx.get().booleanExpression())), input));
}
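    // GROUPING SETS, CUBE and ROLLUP are lowered to LogicalRepeat over the expanded grouping sets;
    // a plain GROUP BY becomes a LogicalAggregate.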
private LogicalPlan withAggregate(LogicalPlan input, SelectColumnClauseContext selectCtx,
Optional<AggClauseContext> aggCtx) {
return input.optionalMap(aggCtx, () -> {
GroupingElementContext groupingElementContext = aggCtx.get().groupingElement();
List<NamedExpression> namedExpressions = getNamedExpressions(selectCtx.namedExpressionSeq());
if (groupingElementContext.GROUPING() != null) {
ImmutableList.Builder<List<Expression>> groupingSets = ImmutableList.builder();
for (GroupingSetContext groupingSetContext : groupingElementContext.groupingSet()) {
groupingSets.add(visit(groupingSetContext.expression(), Expression.class));
}
return new LogicalRepeat<>(groupingSets.build(), namedExpressions, input);
} else if (groupingElementContext.CUBE() != null) {
List<Expression> cubeExpressions = visit(groupingElementContext.expression(), Expression.class);
List<List<Expression>> groupingSets = ExpressionUtils.cubeToGroupingSets(cubeExpressions);
return new LogicalRepeat<>(groupingSets, namedExpressions, input);
} else if (groupingElementContext.ROLLUP() != null) {
List<Expression> rollupExpressions = visit(groupingElementContext.expression(), Expression.class);
List<List<Expression>> groupingSets = ExpressionUtils.rollupToGroupingSets(rollupExpressions);
return new LogicalRepeat<>(groupingSets, namedExpressions, input);
} else {
List<Expression> groupByExpressions = visit(groupingElementContext.expression(), Expression.class);
return new LogicalAggregate<>(groupByExpressions, namedExpressions, input);
}
});
}
private LogicalPlan withHaving(LogicalPlan input, Optional<HavingClauseContext> havingCtx) {
return input.optionalMap(havingCtx, () -> {
if (!(input instanceof Aggregate)) {
throw new ParseException("Having clause should be applied against an aggregation.", havingCtx.get());
}
return new LogicalHaving<>(ExpressionUtils.extractConjunctionToSet(
getExpression((havingCtx.get().booleanExpression()))), input);
});
}
/**
* match predicate type and generate different predicates.
*
* @param ctx PredicateContext
* @param valueExpression valueExpression
* @return Expression
*/
private Expression withPredicate(Expression valueExpression, PredicateContext ctx) {
return ParserUtils.withOrigin(ctx, () -> {
Expression outExpression;
switch (ctx.kind.getType()) {
case DorisParser.BETWEEN:
outExpression = new Between(
valueExpression,
getExpression(ctx.lower),
getExpression(ctx.upper)
);
break;
case DorisParser.LIKE:
outExpression = new Like(
valueExpression,
getExpression(ctx.pattern)
);
break;
case DorisParser.RLIKE:
case DorisParser.REGEXP:
outExpression = new Regexp(
valueExpression,
getExpression(ctx.pattern)
);
break;
case DorisParser.IN:
if (ctx.query() == null) {
outExpression = new InPredicate(
valueExpression,
withInList(ctx)
);
} else {
outExpression = new InSubquery(
valueExpression,
new ListQuery(typedVisit(ctx.query())),
ctx.NOT() != null
);
}
break;
case DorisParser.NULL:
outExpression = new IsNull(valueExpression);
break;
default:
throw new ParseException("Unsupported predicate type: " + ctx.kind.getText(), ctx);
}
return ctx.NOT() != null ? new Not(outExpression) : outExpression;
});
}
private List<NamedExpression> getNamedExpressions(NamedExpressionSeqContext namedCtx) {
return ParserUtils.withOrigin(namedCtx, () -> {
List<Expression> expressions = visit(namedCtx.namedExpression(), Expression.class);
return expressions.stream().map(expression -> {
if (expression instanceof NamedExpression) {
return (NamedExpression) expression;
} else {
return new UnboundAlias(expression);
}
}).collect(ImmutableList.toImmutableList());
});
}
@Override
public Expression visitSubqueryExpression(SubqueryExpressionContext subqueryExprCtx) {
return ParserUtils.withOrigin(subqueryExprCtx, () -> new ScalarSubquery(typedVisit(subqueryExprCtx.query())));
}
@Override
public Expression visitExist(ExistContext context) {
return ParserUtils.withOrigin(context, () -> new Exists(typedVisit(context.query()), false));
}
@Override
public Expression visitIsnull(IsnullContext context) {
return ParserUtils.withOrigin(context, () -> new IsNull(typedVisit(context.valueExpression())));
}
@Override
public Expression visitIs_not_null_pred(Is_not_null_predContext context) {
return ParserUtils.withOrigin(context, () -> new Not(new IsNull(typedVisit(context.valueExpression()))));
}
public List<Expression> withInList(PredicateContext ctx) {
return ctx.expression().stream().map(this::getExpression).collect(ImmutableList.toImmutableList());
}
@Override
public DecimalLiteral visitDecimalLiteral(DecimalLiteralContext ctx) {
return new DecimalLiteral(new BigDecimal(ctx.getText()));
}
private String parseTVFPropertyItem(TvfPropertyItemContext item) {
if (item.constant() != null) {
Object constant = visit(item.constant());
if (constant instanceof Literal && ((Literal) constant).isStringLiteral()) {
return ((Literal) constant).getStringValue();
}
}
return item.getText();
}
private ExplainLevel parseExplainPlanType(PlanTypeContext planTypeContext) {
if (planTypeContext == null || planTypeContext.ALL() != null) {
return ExplainLevel.ALL_PLAN;
}
if (planTypeContext.PHYSICAL() != null || planTypeContext.OPTIMIZED() != null) {
return ExplainLevel.OPTIMIZED_PLAN;
}
if (planTypeContext.REWRITTEN() != null || planTypeContext.LOGICAL() != null) {
return ExplainLevel.REWRITTEN_PLAN;
}
if (planTypeContext.ANALYZED() != null) {
return ExplainLevel.ANALYZED_PLAN;
}
if (planTypeContext.PARSED() != null) {
return ExplainLevel.PARSED_PLAN;
}
return ExplainLevel.ALL_PLAN;
}
@Override
public DataType visitPrimitiveDataType(PrimitiveDataTypeContext ctx) {
String dataType = ctx.identifier().getText().toLowerCase(Locale.ROOT);
List<String> l = Lists.newArrayList(dataType);
ctx.INTEGER_VALUE().stream().map(ParseTree::getText).forEach(l::add);
return DataType.convertPrimitiveFromStrings(l);
}
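    /**
     * Handles aggregate calls that carry an ORDER BY, e.g. group_concat(x ORDER BY y). Only
     * group_concat with one or two arguments is supported; the order keys are passed along as
     * OrderExpression arguments.
     */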
private Expression parseFunctionWithOrderKeys(String functionName, boolean isDistinct,
List<Expression> params, List<OrderKey> orderKeys, ParserRuleContext ctx) {
if (functionName.equalsIgnoreCase("group_concat")) {
OrderExpression[] orderExpressions = orderKeys.stream()
.map(OrderExpression::new)
.toArray(OrderExpression[]::new);
if (params.size() == 1) {
return new GroupConcat(isDistinct, params.get(0), orderExpressions);
} else if (params.size() == 2) {
return new GroupConcat(isDistinct, params.get(0), params.get(1), orderExpressions);
} else {
throw new ParseException("group_concat requires one or two parameters: " + params, ctx);
}
}
throw new ParseException("Unsupported function with order expressions" + ctx.getText(), ctx);
}
} |
```suggestion // Note this takes ownership of newValues. This object is no longer used after it has been closed. ``` | public void asyncClose() throws Exception {
checkState(
!isClosed,
"Bag user state is no longer usable because it is closed for %s",
request.getStateKey());
isClosed = true;
if (!isCleared && newValues.isEmpty()) {
return;
}
if (isCleared) {
beamFnStateClient.handle(
request.toBuilder().setClear(StateClearRequest.getDefaultInstance()));
}
if (!newValues.isEmpty()) {
ByteString.Output out = ByteString.newOutput();
for (T newValue : newValues) {
valueCoder.encode(newValue, out);
}
beamFnStateClient.handle(
request
.toBuilder()
.setAppend(StateAppendRequest.newBuilder().setData(out.toByteString())));
}
if (isCleared) {
oldValues.clearAndAppend(newValues);
} else {
oldValues.append(newValues);
}
} | public void asyncClose() throws Exception {
checkState(
!isClosed,
"Bag user state is no longer usable because it is closed for %s",
request.getStateKey());
isClosed = true;
if (!isCleared && newValues.isEmpty()) {
return;
}
if (isCleared) {
beamFnStateClient.handle(
request.toBuilder().setClear(StateClearRequest.getDefaultInstance()));
}
if (!newValues.isEmpty()) {
ByteString.Output out = ByteString.newOutput();
for (T newValue : newValues) {
valueCoder.encode(newValue, out);
}
beamFnStateClient.handle(
request
.toBuilder()
.setAppend(StateAppendRequest.newBuilder().setData(out.toByteString())));
}
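    // Note this takes ownership of newValues. This object is no longer used after it has been closed.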
if (isCleared) {
oldValues.clearAndAppend(newValues);
} else {
oldValues.append(newValues);
}
} | class BagUserState<T> {
private final Cache<?, ?> cache;
private final BeamFnStateClient beamFnStateClient;
private final StateRequest request;
private final Coder<T> valueCoder;
private final CachingStateIterable<T> oldValues;
private List<T> newValues;
private boolean isCleared;
private boolean isClosed;
/** The cache must be namespaced for this state object accordingly. */
public BagUserState(
Cache<?, ?> cache,
BeamFnStateClient beamFnStateClient,
String instructionId,
StateKey stateKey,
Coder<T> valueCoder) {
checkArgument(
stateKey.hasBagUserState(), "Expected BagUserState StateKey but received %s.", stateKey);
this.cache = cache;
this.beamFnStateClient = beamFnStateClient;
this.valueCoder = valueCoder;
this.request =
StateRequest.newBuilder().setInstructionId(instructionId).setStateKey(stateKey).build();
this.oldValues =
StateFetchingIterators.readAllAndDecodeStartingFrom(
this.cache, beamFnStateClient, request, valueCoder);
this.newValues = new ArrayList<>();
}
public PrefetchableIterable<T> get() {
checkState(
!isClosed,
"Bag user state is no longer usable because it is closed for %s",
request.getStateKey());
if (isCleared) {
return PrefetchableIterables.limit(Collections.unmodifiableList(newValues), newValues.size());
} else if (newValues.isEmpty()) {
return oldValues;
}
return PrefetchableIterables.concat(
oldValues, Iterables.limit(Collections.unmodifiableList(newValues), newValues.size()));
}
public void append(T t) {
checkState(
!isClosed,
"Bag user state is no longer usable because it is closed for %s",
request.getStateKey());
newValues.add(t);
}
public void clear() {
checkState(
!isClosed,
"Bag user state is no longer usable because it is closed for %s",
request.getStateKey());
isCleared = true;
newValues = new ArrayList<>();
}
@SuppressWarnings("FutureReturnValueIgnored")
} | class BagUserState<T> {
private final Cache<?, ?> cache;
private final BeamFnStateClient beamFnStateClient;
private final StateRequest request;
private final Coder<T> valueCoder;
private final CachingStateIterable<T> oldValues;
private List<T> newValues;
private boolean isCleared;
private boolean isClosed;
/** The cache must be namespaced for this state object accordingly. */
public BagUserState(
Cache<?, ?> cache,
BeamFnStateClient beamFnStateClient,
String instructionId,
StateKey stateKey,
Coder<T> valueCoder) {
checkArgument(
stateKey.hasBagUserState(), "Expected BagUserState StateKey but received %s.", stateKey);
this.cache = cache;
this.beamFnStateClient = beamFnStateClient;
this.valueCoder = valueCoder;
this.request =
StateRequest.newBuilder().setInstructionId(instructionId).setStateKey(stateKey).build();
this.oldValues =
StateFetchingIterators.readAllAndDecodeStartingFrom(
this.cache, beamFnStateClient, request, valueCoder);
this.newValues = new ArrayList<>();
}
public PrefetchableIterable<T> get() {
checkState(
!isClosed,
"Bag user state is no longer usable because it is closed for %s",
request.getStateKey());
if (isCleared) {
return PrefetchableIterables.limit(Collections.unmodifiableList(newValues), newValues.size());
} else if (newValues.isEmpty()) {
return oldValues;
}
return PrefetchableIterables.concat(
oldValues, Iterables.limit(Collections.unmodifiableList(newValues), newValues.size()));
}
public void append(T t) {
checkState(
!isClosed,
"Bag user state is no longer usable because it is closed for %s",
request.getStateKey());
newValues.add(t);
}
public void clear() {
checkState(
!isClosed,
"Bag user state is no longer usable because it is closed for %s",
request.getStateKey());
isCleared = true;
newValues = new ArrayList<>();
}
@SuppressWarnings("FutureReturnValueIgnored")
} |
|
it's kinda both! If the source is unbounded (streaming) - and the groupingFactor has not been specified by the user, then default to no grouping. | public SpannerWriteResult expand(PCollection<MutationGroup> input) {
PCollection<Void> schemaSeed =
input.getPipeline().apply("Create Seed", Create.of((Void) null));
if (spec.getSchemaReadySignal() != null) {
schemaSeed = schemaSeed.apply("Wait for schema", Wait.on(spec.getSchemaReadySignal()));
}
final PCollectionView<SpannerSchema> schemaView =
schemaSeed
.apply(
"Read information schema",
ParDo.of(new ReadSpannerSchema(spec.getSpannerConfig())))
.apply("Schema View", View.asSingleton());
PCollectionTuple filteredMutations =
input
.apply("To Global Window", Window.into(new GlobalWindows()))
.apply(
"Filter Unbatchable Mutations",
ParDo.of(
new BatchableMutationFilterFn(
schemaView,
UNBATCHABLE_MUTATIONS_TAG,
spec.getBatchSizeBytes(),
spec.getMaxNumMutations(),
spec.getMaxNumRows()))
.withSideInputs(schemaView)
.withOutputTags(
BATCHABLE_MUTATIONS_TAG, TupleTagList.of(UNBATCHABLE_MUTATIONS_TAG)));
PCollection<Iterable<MutationGroup>> batchedMutations =
filteredMutations
.get(BATCHABLE_MUTATIONS_TAG)
.apply(
"Gather And Sort",
ParDo.of(
new GatherBundleAndSortFn(
spec.getBatchSizeBytes(),
spec.getMaxNumMutations(),
spec.getMaxNumRows(),
spec.getGroupingFactor()
.orElse(
input.isBounded() == IsBounded.BOUNDED
? DEFAULT_GROUPING_FACTOR
: 1),
schemaView))
.withSideInputs(schemaView))
.apply(
"Create Batches",
ParDo.of(
new BatchFn(
spec.getBatchSizeBytes(),
spec.getMaxNumMutations(),
spec.getMaxNumRows(),
schemaView))
.withSideInputs(schemaView));
PCollectionTuple result =
PCollectionList.of(filteredMutations.get(UNBATCHABLE_MUTATIONS_TAG))
.and(batchedMutations)
.apply("Merge", Flatten.pCollections())
.apply(
"Write mutations to Spanner",
ParDo.of(
new WriteToSpannerFn(
spec.getSpannerConfig(), spec.getFailureMode(), FAILED_MUTATIONS_TAG))
.withOutputTags(MAIN_OUT_TAG, TupleTagList.of(FAILED_MUTATIONS_TAG)));
return new SpannerWriteResult(
input.getPipeline(),
result.get(MAIN_OUT_TAG),
result.get(FAILED_MUTATIONS_TAG),
FAILED_MUTATIONS_TAG);
} | ParDo.of( | public SpannerWriteResult expand(PCollection<MutationGroup> input) {
PCollection<Void> schemaSeed =
input.getPipeline().apply("Create Seed", Create.of((Void) null));
if (spec.getSchemaReadySignal() != null) {
schemaSeed = schemaSeed.apply("Wait for schema", Wait.on(spec.getSchemaReadySignal()));
}
final PCollectionView<SpannerSchema> schemaView =
schemaSeed
.apply(
"Read information schema",
ParDo.of(new ReadSpannerSchema(spec.getSpannerConfig())))
.apply("Schema View", View.asSingleton());
PCollectionTuple filteredMutations =
input
.apply(
"RewindowIntoGlobal",
Window.<MutationGroup>into(new GlobalWindows())
.triggering(DefaultTrigger.of())
.discardingFiredPanes())
.apply(
"Filter Unbatchable Mutations",
ParDo.of(
new BatchableMutationFilterFn(
schemaView,
UNBATCHABLE_MUTATIONS_TAG,
spec.getBatchSizeBytes(),
spec.getMaxNumMutations(),
spec.getMaxNumRows()))
.withSideInputs(schemaView)
.withOutputTags(
BATCHABLE_MUTATIONS_TAG, TupleTagList.of(UNBATCHABLE_MUTATIONS_TAG)));
PCollection<Iterable<MutationGroup>> batchedMutations =
filteredMutations
.get(BATCHABLE_MUTATIONS_TAG)
.apply(
"Gather And Sort",
ParDo.of(
new GatherBundleAndSortFn(
spec.getBatchSizeBytes(),
spec.getMaxNumMutations(),
spec.getMaxNumRows(),
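                            // Bounded (batch) input defaults to DEFAULT_GROUPING_FACTOR; unbounded
                            // (streaming) input defaults to 1, i.e. no grouping across bundles.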
spec.getGroupingFactor()
.orElse(
input.isBounded() == IsBounded.BOUNDED
? DEFAULT_GROUPING_FACTOR
: 1),
schemaView))
.withSideInputs(schemaView))
.apply(
"Create Batches",
ParDo.of(
new BatchFn(
spec.getBatchSizeBytes(),
spec.getMaxNumMutations(),
spec.getMaxNumRows(),
schemaView))
.withSideInputs(schemaView));
PCollectionTuple result =
PCollectionList.of(filteredMutations.get(UNBATCHABLE_MUTATIONS_TAG))
.and(batchedMutations)
.apply("Merge", Flatten.pCollections())
.apply(
"Write mutations to Spanner",
ParDo.of(
new WriteToSpannerFn(
spec.getSpannerConfig(), spec.getFailureMode(), FAILED_MUTATIONS_TAG))
.withOutputTags(MAIN_OUT_TAG, TupleTagList.of(FAILED_MUTATIONS_TAG)));
return new SpannerWriteResult(
input.getPipeline(),
result.get(MAIN_OUT_TAG),
result.get(FAILED_MUTATIONS_TAG),
FAILED_MUTATIONS_TAG);
} | class WriteGrouped
extends PTransform<PCollection<MutationGroup>, SpannerWriteResult> {
private final Write spec;
private static final TupleTag<MutationGroup> BATCHABLE_MUTATIONS_TAG =
new TupleTag<MutationGroup>("batchableMutations") {};
private static final TupleTag<Iterable<MutationGroup>> UNBATCHABLE_MUTATIONS_TAG =
new TupleTag<Iterable<MutationGroup>>("unbatchableMutations") {};
private static final TupleTag<Void> MAIN_OUT_TAG = new TupleTag<Void>("mainOut") {};
private static final TupleTag<MutationGroup> FAILED_MUTATIONS_TAG =
new TupleTag<MutationGroup>("failedMutations") {};
private static final SerializableCoder<MutationGroup> CODER =
SerializableCoder.of(MutationGroup.class);
public WriteGrouped(Write spec) {
this.spec = spec;
}
@Override
public void populateDisplayData(DisplayData.Builder builder) {
super.populateDisplayData(builder);
spec.getSpannerConfig().populateDisplayData(builder);
builder.add(
DisplayData.item("batchSizeBytes", spec.getBatchSizeBytes())
.withLabel("Max batch Size in Bytes"));
builder.add(
DisplayData.item("maxNumMutations", spec.getMaxNumMutations())
.withLabel("Max number of mutated cells in batches"));
builder.add(
DisplayData.item("maxNumRows", spec.getMaxNumRows())
.withLabel("Max number of rows in Batches"));
builder.add(
DisplayData.item(
"groupingFactor",
(spec.getGroupingFactor().isPresent()
? Integer.toString(spec.getGroupingFactor().getAsInt())
: "DEFAULT"))
.withLabel("Number of batches to sort over"));
}
@Override
@VisibleForTesting
static MutationGroup decode(byte[] bytes) {
ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
try {
return CODER.decode(bis);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@VisibleForTesting
static byte[] encode(MutationGroup g) {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
try {
CODER.encode(g, bos);
} catch (IOException e) {
throw new RuntimeException(e);
}
return bos.toByteArray();
}
} | class WriteGrouped
extends PTransform<PCollection<MutationGroup>, SpannerWriteResult> {
private final Write spec;
private static final TupleTag<MutationGroup> BATCHABLE_MUTATIONS_TAG =
new TupleTag<MutationGroup>("batchableMutations") {};
private static final TupleTag<Iterable<MutationGroup>> UNBATCHABLE_MUTATIONS_TAG =
new TupleTag<Iterable<MutationGroup>>("unbatchableMutations") {};
private static final TupleTag<Void> MAIN_OUT_TAG = new TupleTag<Void>("mainOut") {};
private static final TupleTag<MutationGroup> FAILED_MUTATIONS_TAG =
new TupleTag<MutationGroup>("failedMutations") {};
private static final SerializableCoder<MutationGroup> CODER =
SerializableCoder.of(MutationGroup.class);
public WriteGrouped(Write spec) {
this.spec = spec;
}
@Override
public void populateDisplayData(DisplayData.Builder builder) {
super.populateDisplayData(builder);
spec.populateDisplayDataWithParamaters(builder);
}
@Override
@VisibleForTesting
static MutationGroup decode(byte[] bytes) {
ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
try {
return CODER.decode(bis);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@VisibleForTesting
static byte[] encode(MutationGroup g) {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
try {
CODER.encode(g, bos);
} catch (IOException e) {
throw new RuntimeException(e);
}
return bos.toByteArray();
}
} |
Yes. I ran the live test to record the response but found it is no longer a valid test. | static AnalyzeHealthcareEntitiesResult getRecognizeHealthcareEntitiesResult2() {
TextDocumentStatistics textDocumentStatistics = new TextDocumentStatistics(156, 1);
final HealthcareEntity healthcareEntity1 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity1, "six minutes");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity1, HealthcareEntityCategory.TIME);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity1, 0.87);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity1, 21);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity1, 11);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity1,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity2 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity2, "minimal");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity2, HealthcareEntityCategory.CONDITION_QUALIFIER);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity2, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity2, 38);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity2, 7);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity2,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity3 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity3, "ST depressions in the anterior lateral leads");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity3, HealthcareEntityCategory.SYMPTOM_OR_SIGN);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity3, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity3, 46);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity3, 44);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity3,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity5 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity5, "fatigue");
HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity5, "Fatigue");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity5, HealthcareEntityCategory.SYMPTOM_OR_SIGN);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity5, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity5, 108);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity5, 7);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity5,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity6 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity6, "wrist pain");
HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity6, "Pain in wrist");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity6, HealthcareEntityCategory.SYMPTOM_OR_SIGN);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity6, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity6, 120);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity6, 10);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity6,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity7 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity7, "anginal equivalent");
HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity7, "Anginal equivalent");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity7, HealthcareEntityCategory.SYMPTOM_OR_SIGN);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity7, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity7, 137);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity7, 18);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity7,
IterableStream.of(Collections.emptyList()));
final AnalyzeHealthcareEntitiesResult healthcareEntitiesResult = new AnalyzeHealthcareEntitiesResult("1",
textDocumentStatistics, null);
AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntities(healthcareEntitiesResult,
new IterableStream<>(asList(healthcareEntity1, healthcareEntity2, healthcareEntity3,
healthcareEntity5, healthcareEntity6, healthcareEntity7)));
final HealthcareEntityRelation healthcareEntityRelation1 = new HealthcareEntityRelation();
final HealthcareEntityRelationRole role1 = new HealthcareEntityRelationRole();
HealthcareEntityRelationRolePropertiesHelper.setName(role1, "Time");
HealthcareEntityRelationRolePropertiesHelper.setEntity(role1, healthcareEntity1);
final HealthcareEntityRelationRole role2 = new HealthcareEntityRelationRole();
HealthcareEntityRelationRolePropertiesHelper.setName(role2, "Condition");
HealthcareEntityRelationRolePropertiesHelper.setEntity(role2, healthcareEntity3);
HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation1,
HealthcareEntityRelationType.TIME_OF_CONDITION);
HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation1,
IterableStream.of(asList(role1, role2)));
final HealthcareEntityRelation healthcareEntityRelation2 = new HealthcareEntityRelation();
final HealthcareEntityRelationRole role3 = new HealthcareEntityRelationRole();
HealthcareEntityRelationRolePropertiesHelper.setName(role3, "Qualifier");
HealthcareEntityRelationRolePropertiesHelper.setEntity(role3, healthcareEntity2);
HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation2,
HealthcareEntityRelationType.QUALIFIER_OF_CONDITION);
HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation2,
IterableStream.of(asList(role3, role2)));
AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntityRelations(healthcareEntitiesResult,
IterableStream.of(asList(healthcareEntityRelation1, healthcareEntityRelation2)));
return healthcareEntitiesResult;
} | HealthcareEntityPropertiesHelper.setText(healthcareEntity3, "ST depressions in the anterior lateral leads"); | static AnalyzeHealthcareEntitiesResult getRecognizeHealthcareEntitiesResult2() {
TextDocumentStatistics textDocumentStatistics = new TextDocumentStatistics(156, 1);
final HealthcareEntity healthcareEntity1 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity1, "six minutes");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity1, HealthcareEntityCategory.TIME);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity1, 0.87);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity1, 21);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity1, 11);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity1,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity2 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity2, "minimal");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity2, HealthcareEntityCategory.CONDITION_QUALIFIER);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity2, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity2, 38);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity2, 7);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity2,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity3 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity3, "ST depressions in the anterior lateral leads");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity3, HealthcareEntityCategory.SYMPTOM_OR_SIGN);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity3, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity3, 46);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity3, 44);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity3,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity5 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity5, "fatigue");
HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity5, "Fatigue");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity5, HealthcareEntityCategory.SYMPTOM_OR_SIGN);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity5, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity5, 108);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity5, 7);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity5,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity6 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity6, "wrist pain");
HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity6, "Pain in wrist");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity6, HealthcareEntityCategory.SYMPTOM_OR_SIGN);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity6, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity6, 120);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity6, 10);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity6,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity7 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity7, "anginal equivalent");
HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity7, "Anginal equivalent");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity7, HealthcareEntityCategory.SYMPTOM_OR_SIGN);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity7, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity7, 137);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity7, 18);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity7,
IterableStream.of(Collections.emptyList()));
final AnalyzeHealthcareEntitiesResult healthcareEntitiesResult = new AnalyzeHealthcareEntitiesResult("1",
textDocumentStatistics, null);
AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntities(healthcareEntitiesResult,
new IterableStream<>(asList(healthcareEntity1, healthcareEntity2, healthcareEntity3,
healthcareEntity5, healthcareEntity6, healthcareEntity7)));
final HealthcareEntityRelation healthcareEntityRelation1 = new HealthcareEntityRelation();
final HealthcareEntityRelationRole role1 = new HealthcareEntityRelationRole();
HealthcareEntityRelationRolePropertiesHelper.setName(role1, "Time");
HealthcareEntityRelationRolePropertiesHelper.setEntity(role1, healthcareEntity1);
final HealthcareEntityRelationRole role2 = new HealthcareEntityRelationRole();
HealthcareEntityRelationRolePropertiesHelper.setName(role2, "Condition");
HealthcareEntityRelationRolePropertiesHelper.setEntity(role2, healthcareEntity3);
HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation1,
HealthcareEntityRelationType.TIME_OF_CONDITION);
HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation1,
IterableStream.of(asList(role1, role2)));
final HealthcareEntityRelation healthcareEntityRelation2 = new HealthcareEntityRelation();
final HealthcareEntityRelationRole role3 = new HealthcareEntityRelationRole();
HealthcareEntityRelationRolePropertiesHelper.setName(role3, "Qualifier");
HealthcareEntityRelationRolePropertiesHelper.setEntity(role3, healthcareEntity2);
HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation2,
HealthcareEntityRelationType.QUALIFIER_OF_CONDITION);
HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation2,
IterableStream.of(asList(role3, role2)));
AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntityRelations(healthcareEntitiesResult,
IterableStream.of(asList(healthcareEntityRelation1, healthcareEntityRelation2)));
return healthcareEntitiesResult;
} | class TestUtils {
private static final String DEFAULT_MODEL_VERSION = "2019-10-01";
static final OffsetDateTime TIME_NOW = OffsetDateTime.now();
static final String INVALID_URL = "htttttttps:
static final String VALID_HTTPS_LOCALHOST = "https:
static final String FAKE_API_KEY = "1234567890";
static final String AZURE_TEXT_ANALYTICS_API_KEY = "AZURE_TEXT_ANALYTICS_API_KEY";
static final String CUSTOM_ACTION_NAME = "customActionName";
static final List<String> CUSTOM_ENTITIES_INPUT = asList(
"David Schmidt, senior vice president--Food Safety, International Food Information Council (IFIC), Washington,"
+ " D.C., discussed the physical activity component.");
static final List<String> CUSTOM_SINGLE_CLASSIFICATION = asList(
"A recent report by the Government Accountability Office (GAO) found that the dramatic increase in oil"
+ " and natural gas development on federal lands over the past six years has stretched the staff of "
+ "the BLM to a point that it has been unable to meet its environmental protection responsibilities.");
static final List<String> CUSTOM_MULTI_CLASSIFICATION = asList(
"I need a reservation for an indoor restaurant in China. Please don't stop the music. Play music and add"
+ " it to my playlist");
static final List<String> SUMMARY_INPUTS = asList(
"At Microsoft, we have been on a quest to advance AI beyond existing techniques, by taking a more holistic,"
+ " human-centric approach to learning and understanding. As Chief Technology Officer of Azure AI "
+ "Cognitive Services, I have been working with a team of amazing scientists and engineers to turn this"
+ " quest into a reality. In my role, I enjoy a unique perspective in viewing the relationship among "
+ "three attributes of human cognition: monolingual text (X), audio or visual sensory signals, (Y) and"
+ " multilingual (Z). At the intersection of all three, there’s magic—what we call XYZ-code as"
+ " illustrated in Figure 1—a joint representation to create more powerful AI that can speak, hear, see,"
+ " and understand humans better. We believe XYZ-code will enable us to fulfill our long-term vision:"
+ " cross-domain transfer learning, spanning modalities and languages. The goal is to have pretrained"
+ " models that can jointly learn representations to support a broad range of downstream AI tasks, much"
+ " in the way humans do today. Over the past five years, we have achieved human performance on benchmarks"
+ " in conversational speech recognition, machine translation, conversational question answering, machine"
+ " reading comprehension, and image captioning. These five breakthroughs provided us with strong signals"
+ " toward our more ambitious aspiration to produce a leap in AI capabilities, achieving multisensory and"
+ " multilingual learning that is closer in line with how humans learn and understand. I believe the joint"
+ " XYZ-code is a foundational component of this aspiration, if grounded with external knowledge sources"
+ " in the downstream AI tasks."
);
static final List<String> SENTIMENT_INPUTS = asList(
"The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean.");
static final List<String> CATEGORIZED_ENTITY_INPUTS = asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
static final List<String> PII_ENTITY_INPUTS = asList(
"Microsoft employee with ssn 859-98-0987 is using our awesome API's.",
"Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check.");
static final List<String> LINKED_ENTITY_INPUTS = asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
static final List<String> KEY_PHRASE_INPUTS = asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
static final String TOO_LONG_INPUT = "Thisisaveryveryverylongtextwhichgoesonforalongtimeandwhichalmostdoesn'tseemtostopatanygivenpointintime.ThereasonforthistestistotryandseewhathappenswhenwesubmitaveryveryverylongtexttoLanguage.Thisshouldworkjustfinebutjustincaseitisalwaysgoodtohaveatestcase.ThisallowsustotestwhathappensifitisnotOK.Ofcourseitisgoingtobeokbutthenagainitisalsobettertobesure!";
static final List<String> KEY_PHRASE_FRENCH_INPUTS = asList(
"Bonjour tout le monde.",
"Je m'appelle Mondly.");
static final List<String> DETECT_LANGUAGE_INPUTS = asList(
"This is written in English", "Este es un documento escrito en Español.", "~@!~:)");
static final String PII_ENTITY_OFFSET_INPUT = "SSN: 859-98-0987";
static final String SENTIMENT_OFFSET_INPUT = "The hotel was unclean.";
static final String HEALTHCARE_ENTITY_OFFSET_INPUT = "The patient is a 54-year-old";
static final List<String> HEALTHCARE_INPUTS = asList(
"The patient is a 54-year-old gentleman with a history of progressive angina over the past several months.",
"The patient went for six minutes with minimal ST depressions in the anterior lateral leads , thought due to fatigue and wrist pain , his anginal equivalent.");
static final List<String> SPANISH_SAME_AS_ENGLISH_INPUTS = asList("personal", "social");
static final DetectedLanguage DETECTED_LANGUAGE_SPANISH = new DetectedLanguage("Spanish", "es", 1.0, null);
static final DetectedLanguage DETECTED_LANGUAGE_ENGLISH = new DetectedLanguage("English", "en", 1.0, null);
static final List<DetectedLanguage> DETECT_SPANISH_LANGUAGE_RESULTS = asList(
DETECTED_LANGUAGE_SPANISH, DETECTED_LANGUAGE_SPANISH);
static final List<DetectedLanguage> DETECT_ENGLISH_LANGUAGE_RESULTS = asList(
DETECTED_LANGUAGE_ENGLISH, DETECTED_LANGUAGE_ENGLISH);
static final HttpResponseException HTTP_RESPONSE_EXCEPTION_CLASS = new HttpResponseException("", null);
static final String DISPLAY_NAME_WITH_ARGUMENTS = "{displayName} with [{arguments}]";
private static final String AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS =
"AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS";
static List<DetectLanguageInput> getDetectLanguageInputs() {
return asList(
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"),
new DetectLanguageInput("1", DETECT_LANGUAGE_INPUTS.get(1), "US"),
new DetectLanguageInput("2", DETECT_LANGUAGE_INPUTS.get(2), "US")
);
}
static List<DetectLanguageInput> getDuplicateIdDetectLanguageInputs() {
return asList(
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"),
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US")
);
}
static List<TextDocumentInput> getDuplicateTextDocumentInputs() {
return asList(
new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0))
);
}
static List<TextDocumentInput> getWarningsTextDocumentInputs() {
return asList(
new TextDocumentInput("0", TOO_LONG_INPUT),
new TextDocumentInput("1", CATEGORIZED_ENTITY_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentInputs(List<String> inputs) {
return IntStream.range(0, inputs.size())
.mapToObj(index ->
new TextDocumentInput(String.valueOf(index), inputs.get(index)))
.collect(Collectors.toList());
}
/**
* Helper method to get the expected Batch Detected Languages
*
* @return A {@link DetectLanguageResultCollection}.
*/
static DetectLanguageResultCollection getExpectedBatchDetectedLanguages() {
final TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(3, 3, 0, 3);
final List<DetectLanguageResult> detectLanguageResultList = asList(
new DetectLanguageResult("0", new TextDocumentStatistics(26, 1), null, getDetectedLanguageEnglish()),
new DetectLanguageResult("1", new TextDocumentStatistics(40, 1), null, getDetectedLanguageSpanish()),
new DetectLanguageResult("2", new TextDocumentStatistics(6, 1), null, getUnknownDetectedLanguage()));
return new DetectLanguageResultCollection(detectLanguageResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
static DetectedLanguage getDetectedLanguageEnglish() {
return new DetectedLanguage("English", "en", 0.0, null);
}
static DetectedLanguage getDetectedLanguageSpanish() {
return new DetectedLanguage("Spanish", "es", 0.0, null);
}
static DetectedLanguage getUnknownDetectedLanguage() {
return new DetectedLanguage("(Unknown)", "(Unknown)", 0.0, null);
}
/**
* Helper method to get the expected Batch Categorized Entities
*
* @return A {@link RecognizeEntitiesResultCollection}.
*/
static RecognizeEntitiesResultCollection getExpectedBatchCategorizedEntities() {
return new RecognizeEntitiesResultCollection(
asList(getExpectedBatchCategorizedEntities1(), getExpectedBatchCategorizedEntities2()),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* Helper method to get the expected Categorized Entities List 1
*/
static List<CategorizedEntity> getCategorizedEntitiesList1() {
CategorizedEntity categorizedEntity1 = new CategorizedEntity("trip", EntityCategory.EVENT, null, 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity1, 18);
CategorizedEntity categorizedEntity2 = new CategorizedEntity("Seattle", EntityCategory.LOCATION, "GPE", 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity2, 26);
CategorizedEntity categorizedEntity3 = new CategorizedEntity("last week", EntityCategory.DATE_TIME, "DateRange", 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity3, 34);
return asList(categorizedEntity1, categorizedEntity2, categorizedEntity3);
}
/**
* Helper method to get the expected Categorized Entities List 2
*/
static List<CategorizedEntity> getCategorizedEntitiesList2() {
CategorizedEntity categorizedEntity1 = new CategorizedEntity("Microsoft", EntityCategory.ORGANIZATION, null, 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity1, 10);
return asList(categorizedEntity1);
}
/**
* Helper method to get the expected Categorized entity result for PII document input.
*/
static List<CategorizedEntity> getCategorizedEntitiesForPiiInput() {
CategorizedEntity categorizedEntity1 = new CategorizedEntity("Microsoft", EntityCategory.ORGANIZATION, null, 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity1, 0);
CategorizedEntity categorizedEntity2 = new CategorizedEntity("employee", EntityCategory.PERSON_TYPE, null, 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity2, 10);
CategorizedEntity categorizedEntity3 = new CategorizedEntity("859", EntityCategory.QUANTITY, "Number", 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity3, 28);
CategorizedEntity categorizedEntity4 = new CategorizedEntity("98", EntityCategory.QUANTITY, "Number", 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity4, 32);
CategorizedEntity categorizedEntity5 = new CategorizedEntity("0987", EntityCategory.QUANTITY, "Number", 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity5, 35);
CategorizedEntity categorizedEntity6 = new CategorizedEntity("API", EntityCategory.SKILL, null, 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity6, 61);
return asList(categorizedEntity1, categorizedEntity2, categorizedEntity3, categorizedEntity4, categorizedEntity5, categorizedEntity6);
}
/**
* Helper method to get the expected Batch Categorized Entities
*/
static RecognizeEntitiesResult getExpectedBatchCategorizedEntities1() {
IterableStream<CategorizedEntity> categorizedEntityList1 = new IterableStream<>(getCategorizedEntitiesList1());
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
RecognizeEntitiesResult recognizeEntitiesResult1 = new RecognizeEntitiesResult("0", textDocumentStatistics1, null, new CategorizedEntityCollection(categorizedEntityList1, null));
return recognizeEntitiesResult1;
}
/**
* Helper method to get the expected Batch Categorized Entities
*/
static RecognizeEntitiesResult getExpectedBatchCategorizedEntities2() {
IterableStream<CategorizedEntity> categorizedEntityList2 = new IterableStream<>(getCategorizedEntitiesList2());
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeEntitiesResult recognizeEntitiesResult2 = new RecognizeEntitiesResult("1", textDocumentStatistics2, null, new CategorizedEntityCollection(categorizedEntityList2, null));
return recognizeEntitiesResult2;
}
/**
* Helper method to get the expected batch of Personally Identifiable Information entities
*/
static RecognizePiiEntitiesResultCollection getExpectedBatchPiiEntities() {
PiiEntityCollection piiEntityCollection = new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList1()),
"********* ******** with ssn *********** is using our awesome API's.", null);
PiiEntityCollection piiEntityCollection2 = new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList2()),
"Your ABA number - ********* - is the first 9 digits in the lower left hand corner of your personal check.", null);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1);
RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, piiEntityCollection);
RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, piiEntityCollection2);
return new RecognizePiiEntitiesResultCollection(
asList(recognizeEntitiesResult1, recognizeEntitiesResult2),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* Helper method to get the expected batch of Personally Identifiable Information entities for domain filter
*/
static RecognizePiiEntitiesResultCollection getExpectedBatchPiiEntitiesForDomainFilter() {
PiiEntityCollection piiEntityCollection = new PiiEntityCollection(
new IterableStream<>(getPiiEntitiesList1ForDomainFilter()),
"********* employee with ssn *********** is using our awesome API's.", null);
PiiEntityCollection piiEntityCollection2 = new PiiEntityCollection(
new IterableStream<>(Arrays.asList(getPiiEntitiesList2().get(0), getPiiEntitiesList2().get(1), getPiiEntitiesList2().get(2))),
"Your ABA number - ********* - is the first 9 digits in the lower left hand corner of your personal check.", null);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1);
RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, piiEntityCollection);
RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, piiEntityCollection2);
return new RecognizePiiEntitiesResultCollection(
asList(recognizeEntitiesResult1, recognizeEntitiesResult2),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* Helper method to get the expected PII Entities List 1
*/
static List<PiiEntity> getPiiEntitiesList1() {
final PiiEntity piiEntity0 = new PiiEntity();
PiiEntityPropertiesHelper.setText(piiEntity0, "Microsoft");
PiiEntityPropertiesHelper.setCategory(piiEntity0, PiiEntityCategory.ORGANIZATION);
PiiEntityPropertiesHelper.setSubcategory(piiEntity0, null);
PiiEntityPropertiesHelper.setOffset(piiEntity0, 0);
final PiiEntity piiEntity1 = new PiiEntity();
PiiEntityPropertiesHelper.setText(piiEntity1, "employee");
PiiEntityPropertiesHelper.setCategory(piiEntity1, PiiEntityCategory.fromString("PersonType"));
PiiEntityPropertiesHelper.setSubcategory(piiEntity1, null);
PiiEntityPropertiesHelper.setOffset(piiEntity1, 10);
final PiiEntity piiEntity2 = new PiiEntity();
PiiEntityPropertiesHelper.setText(piiEntity2, "859-98-0987");
PiiEntityPropertiesHelper.setCategory(piiEntity2, PiiEntityCategory.US_SOCIAL_SECURITY_NUMBER);
PiiEntityPropertiesHelper.setSubcategory(piiEntity2, null);
PiiEntityPropertiesHelper.setOffset(piiEntity2, 28);
return asList(piiEntity0, piiEntity1, piiEntity2);
}
static List<PiiEntity> getPiiEntitiesList1ForDomainFilter() {
return Arrays.asList(getPiiEntitiesList1().get(0), getPiiEntitiesList1().get(2));
}
/**
* Helper method to get the expected PII Entities List 2
*/
static List<PiiEntity> getPiiEntitiesList2() {
String expectedText = "111000025";
final PiiEntity piiEntity0 = new PiiEntity();
PiiEntityPropertiesHelper.setText(piiEntity0, expectedText);
PiiEntityPropertiesHelper.setCategory(piiEntity0, PiiEntityCategory.PHONE_NUMBER);
PiiEntityPropertiesHelper.setSubcategory(piiEntity0, null);
PiiEntityPropertiesHelper.setConfidenceScore(piiEntity0, 0.8);
PiiEntityPropertiesHelper.setOffset(piiEntity0, 18);
final PiiEntity piiEntity1 = new PiiEntity();
PiiEntityPropertiesHelper.setText(piiEntity1, expectedText);
PiiEntityPropertiesHelper.setCategory(piiEntity1, PiiEntityCategory.ABA_ROUTING_NUMBER);
PiiEntityPropertiesHelper.setSubcategory(piiEntity1, null);
PiiEntityPropertiesHelper.setConfidenceScore(piiEntity1, 0.75);
PiiEntityPropertiesHelper.setOffset(piiEntity1, 18);
final PiiEntity piiEntity2 = new PiiEntity();
PiiEntityPropertiesHelper.setText(piiEntity2, expectedText);
PiiEntityPropertiesHelper.setCategory(piiEntity2, PiiEntityCategory.NZ_SOCIAL_WELFARE_NUMBER);
PiiEntityPropertiesHelper.setSubcategory(piiEntity2, null);
PiiEntityPropertiesHelper.setConfidenceScore(piiEntity2, 0.65);
PiiEntityPropertiesHelper.setOffset(piiEntity2, 18);
return asList(piiEntity0, piiEntity1, piiEntity2);
}
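// Note: the three expected entities above all cover the same "111000025" span at offset 18; the expected
// result intentionally lists one PII entity per candidate category (phone number, ABA routing number,
// NZ social welfare number) with decreasing confidence scores (0.8, 0.75, 0.65).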
/**
* Helper method to get the expected batch of Personally Identifiable Information entities for categories filter
*/
static RecognizePiiEntitiesResultCollection getExpectedBatchPiiEntitiesForCategoriesFilter() {
PiiEntityCollection piiEntityCollection = new PiiEntityCollection(
new IterableStream<>(asList(getPiiEntitiesList1().get(2))),
"Microsoft employee with ssn *********** is using our awesome API's.", null);
PiiEntityCollection piiEntityCollection2 = new PiiEntityCollection(
new IterableStream<>(asList(getPiiEntitiesList2().get(1))),
"Your ABA number - ********* - is the first 9 digits in the lower left hand corner of your personal check.", null);
RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", null, null, piiEntityCollection);
RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", null, null, piiEntityCollection2);
return new RecognizePiiEntitiesResultCollection(
asList(recognizeEntitiesResult1, recognizeEntitiesResult2),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* Helper method to get the expected Batch Linked Entities
* @return A {@link RecognizeLinkedEntitiesResultCollection}.
*/
static RecognizeLinkedEntitiesResultCollection getExpectedBatchLinkedEntities() {
final TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
final List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResultList =
asList(
new RecognizeLinkedEntitiesResult(
"0", new TextDocumentStatistics(44, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList1()), null)),
new RecognizeLinkedEntitiesResult(
"1", new TextDocumentStatistics(20, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList2()), null)));
return new RecognizeLinkedEntitiesResultCollection(recognizeLinkedEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected linked Entities List 1
*/
static List<LinkedEntity> getLinkedEntitiesList1() {
final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Seattle", 0.0);
LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch, 26);
LinkedEntity linkedEntity = new LinkedEntity(
"Seattle", new IterableStream<>(Collections.singletonList(linkedEntityMatch)),
"en", "Seattle", "https:
"Wikipedia");
LinkedEntityPropertiesHelper.setBingEntitySearchApiId(linkedEntity, "5fbba6b8-85e1-4d41-9444-d9055436e473");
return asList(linkedEntity);
}
/**
* Helper method to get the expected linked Entities List 2
*/
static List<LinkedEntity> getLinkedEntitiesList2() {
LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Microsoft", 0.0);
LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch, 10);
LinkedEntity linkedEntity = new LinkedEntity(
"Microsoft", new IterableStream<>(Collections.singletonList(linkedEntityMatch)),
"en", "Microsoft", "https:
"Wikipedia");
LinkedEntityPropertiesHelper.setBingEntitySearchApiId(linkedEntity, "a093e9b9-90f5-a3d5-c4b8-5855e1b01f85");
return asList(linkedEntity);
}
static List<LinkedEntity> getLinkedEntitiesList3() {
LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Microsoft", 0.0);
LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch, 0);
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("API's", 0.0);
LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch1, 61);
LinkedEntity linkedEntity = new LinkedEntity(
"Microsoft", new IterableStream<>(Collections.singletonList(linkedEntityMatch)),
"en", "Microsoft", "https:
"Wikipedia");
LinkedEntityPropertiesHelper.setBingEntitySearchApiId(linkedEntity, "a093e9b9-90f5-a3d5-c4b8-5855e1b01f85");
LinkedEntity linkedEntity1 = new LinkedEntity(
"Application programming interface", new IterableStream<>(Collections.singletonList(linkedEntityMatch1)),
"en", "Application programming interface",
"https:
"Wikipedia");
return asList(linkedEntity, linkedEntity1);
}
/**
* Helper method to get the expected Batch Key Phrases.
*/
static ExtractKeyPhrasesResultCollection getExpectedBatchKeyPhrases() {
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(49, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(21, 1);
ExtractKeyPhraseResult extractKeyPhraseResult1 = new ExtractKeyPhraseResult("0", textDocumentStatistics1, null, new KeyPhrasesCollection(new IterableStream<>(asList("Hello world", "input text")), null));
ExtractKeyPhraseResult extractKeyPhraseResult2 = new ExtractKeyPhraseResult("1", textDocumentStatistics2, null, new KeyPhrasesCollection(new IterableStream<>(asList("Bonjour", "monde")), null));
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<ExtractKeyPhraseResult> extractKeyPhraseResultList = asList(extractKeyPhraseResult1, extractKeyPhraseResult2);
return new ExtractKeyPhrasesResultCollection(extractKeyPhraseResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Text Sentiments
*/
static AnalyzeSentimentResultCollection getExpectedBatchTextSentiment() {
final TextDocumentStatistics textDocumentStatistics = new TextDocumentStatistics(67, 1);
final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0",
textDocumentStatistics, null, getExpectedDocumentSentiment());
final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1",
textDocumentStatistics, null, getExpectedDocumentSentiment2());
return new AnalyzeSentimentResultCollection(
asList(analyzeSentimentResult1, analyzeSentimentResult2),
DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* Helper method that gets the first expected DocumentSentiment result.
*/
static DocumentSentiment getExpectedDocumentSentiment() {
final AssessmentSentiment assessmentSentiment1 = new AssessmentSentiment();
AssessmentSentimentPropertiesHelper.setText(assessmentSentiment1, "dark");
AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment1, TextSentiment.NEGATIVE);
AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment1,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment1, false);
AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment1, 14);
AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment1, 0);
final AssessmentSentiment assessmentSentiment2 = new AssessmentSentiment();
AssessmentSentimentPropertiesHelper.setText(assessmentSentiment2, "unclean");
AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment2, TextSentiment.NEGATIVE);
AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment2,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment2, false);
AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment2, 23);
AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment2, 0);
final AssessmentSentiment assessmentSentiment3 = new AssessmentSentiment();
AssessmentSentimentPropertiesHelper.setText(assessmentSentiment3, "amazing");
AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment3, TextSentiment.POSITIVE);
AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment3,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment3, false);
AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment3, 51);
AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment3, 0);
final TargetSentiment targetSentiment1 = new TargetSentiment();
TargetSentimentPropertiesHelper.setText(targetSentiment1, "hotel");
TargetSentimentPropertiesHelper.setSentiment(targetSentiment1, TextSentiment.NEGATIVE);
TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment1,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
TargetSentimentPropertiesHelper.setOffset(targetSentiment1, 4);
final SentenceOpinion sentenceOpinion1 = new SentenceOpinion();
SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion1, targetSentiment1);
SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion1,
new IterableStream<>(asList(assessmentSentiment1, assessmentSentiment2)));
final TargetSentiment targetSentiment2 = new TargetSentiment();
TargetSentimentPropertiesHelper.setText(targetSentiment2, "gnocchi");
TargetSentimentPropertiesHelper.setSentiment(targetSentiment2, TextSentiment.POSITIVE);
TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment2,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
TargetSentimentPropertiesHelper.setOffset(targetSentiment2, 59);
final SentenceOpinion sentenceOpinion2 = new SentenceOpinion();
SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion2, targetSentiment2);
SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion2,
new IterableStream<>(asList(assessmentSentiment3)));
final SentenceSentiment sentenceSentiment1 = new SentenceSentiment(
"The hotel was dark and unclean.", TextSentiment.NEGATIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, new IterableStream<>(asList(sentenceOpinion1)));
SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0);
SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 31);
final SentenceSentiment sentenceSentiment2 = new SentenceSentiment(
"The restaurant had amazing gnocchi.", TextSentiment.POSITIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment2, new IterableStream<>(asList(sentenceOpinion2)));
SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment2, 32);
SentenceSentimentPropertiesHelper.setLength(sentenceSentiment2, 35);
return new DocumentSentiment(TextSentiment.MIXED,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(asList(sentenceSentiment1, sentenceSentiment2)),
null);
}
/**
* Helper method that gets the second expected DocumentSentiment result.
*/
static DocumentSentiment getExpectedDocumentSentiment2() {
final AssessmentSentiment assessmentSentiment1 = new AssessmentSentiment();
AssessmentSentimentPropertiesHelper.setText(assessmentSentiment1, "dark");
AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment1, TextSentiment.NEGATIVE);
AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment1,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment1, false);
AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment1, 50);
AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment1, 0);
final AssessmentSentiment assessmentSentiment2 = new AssessmentSentiment();
AssessmentSentimentPropertiesHelper.setText(assessmentSentiment2, "unclean");
AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment2, TextSentiment.NEGATIVE);
AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment2,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment2, false);
AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment2, 59);
AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment2, 0);
final AssessmentSentiment assessmentSentiment3 = new AssessmentSentiment();
AssessmentSentimentPropertiesHelper.setText(assessmentSentiment3, "amazing");
AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment3, TextSentiment.POSITIVE);
AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment3,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment3, false);
AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment3, 19);
AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment3, 0);
final TargetSentiment targetSentiment1 = new TargetSentiment();
TargetSentimentPropertiesHelper.setText(targetSentiment1, "gnocchi");
TargetSentimentPropertiesHelper.setSentiment(targetSentiment1, TextSentiment.POSITIVE);
TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment1,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
TargetSentimentPropertiesHelper.setOffset(targetSentiment1, 27);
final SentenceOpinion sentenceOpinion1 = new SentenceOpinion();
SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion1, targetSentiment1);
SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion1,
new IterableStream<>(asList(assessmentSentiment3)));
final TargetSentiment targetSentiment2 = new TargetSentiment();
TargetSentimentPropertiesHelper.setText(targetSentiment2, "hotel");
TargetSentimentPropertiesHelper.setSentiment(targetSentiment2, TextSentiment.NEGATIVE);
TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment2,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
TargetSentimentPropertiesHelper.setOffset(targetSentiment2, 40);
final SentenceOpinion sentenceOpinion2 = new SentenceOpinion();
SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion2, targetSentiment2);
SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion2,
new IterableStream<>(asList(assessmentSentiment1, assessmentSentiment2)));
final SentenceSentiment sentenceSentiment1 = new SentenceSentiment(
"The restaurant had amazing gnocchi.", TextSentiment.POSITIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, new IterableStream<>(asList(sentenceOpinion1)));
SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0);
SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 35);
final SentenceSentiment sentenceSentiment2 = new SentenceSentiment(
"The hotel was dark and unclean.", TextSentiment.NEGATIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment2, new IterableStream<>(asList(sentenceOpinion2)));
SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment2, 36);
SentenceSentimentPropertiesHelper.setLength(sentenceSentiment2, 31);
return new DocumentSentiment(TextSentiment.MIXED,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(asList(sentenceSentiment1, sentenceSentiment2)),
null);
}
/*
* This is the expected result for testing an input:
* "I had a wonderful trip to Seattle last week."
*/
static DocumentSentiment getExpectedDocumentSentimentForActions() {
final SentenceSentiment sentenceSentiment1 = new SentenceSentiment(
"I had a wonderful trip to Seattle last week.", TextSentiment.POSITIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, null);
SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0);
SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 44);
return new DocumentSentiment(TextSentiment.POSITIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(asList(sentenceSentiment1)),
null);
}
/*
* This is the expected result for testing an input:
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static DocumentSentiment getExpectedDocumentSentimentForActions2() {
final SentenceSentiment sentenceSentiment1 = new SentenceSentiment(
"Microsoft employee with ssn 859-98-0987 is using our awesome API's.", TextSentiment.POSITIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, null);
SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0);
SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 67);
return new DocumentSentiment(TextSentiment.POSITIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(asList(sentenceSentiment1)),
null);
}
/**
* Helper method that gets a single-page {@link AnalyzeHealthcareEntitiesResultCollection} list.
*/
static List<AnalyzeHealthcareEntitiesResultCollection>
getExpectedAnalyzeHealthcareEntitiesResultCollectionListForSinglePage() {
return asList(
getExpectedAnalyzeHealthcareEntitiesResultCollection(2,
asList(getRecognizeHealthcareEntitiesResult1("0"), getRecognizeHealthcareEntitiesResult2())));
}
/**
* Helper method that gets a multi-page {@link AnalyzeHealthcareEntitiesResultCollection} list.
*/
static List<AnalyzeHealthcareEntitiesResultCollection>
getExpectedAnalyzeHealthcareEntitiesResultCollectionListForMultiplePages(int startIndex, int firstPage,
int secondPage) {
List<AnalyzeHealthcareEntitiesResult> healthcareEntitiesResults1 = new ArrayList<>();
int i = startIndex;
for (; i < startIndex + firstPage; i++) {
healthcareEntitiesResults1.add(getRecognizeHealthcareEntitiesResult1(Integer.toString(i)));
}
List<AnalyzeHealthcareEntitiesResult> healthcareEntitiesResults2 = new ArrayList<>();
for (; i < startIndex + firstPage + secondPage; i++) {
healthcareEntitiesResults2.add(getRecognizeHealthcareEntitiesResult1(Integer.toString(i)));
}
List<AnalyzeHealthcareEntitiesResultCollection> result = new ArrayList<>();
result.add(getExpectedAnalyzeHealthcareEntitiesResultCollection(firstPage, healthcareEntitiesResults1));
if (secondPage != 0) {
result.add(getExpectedAnalyzeHealthcareEntitiesResultCollection(secondPage, healthcareEntitiesResults2));
}
return result;
}
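// A worked example with illustrative (assumed) arguments: calling the helper above with startIndex = 0,
// firstPage = 20 and secondPage = 2 returns two collections, the first holding results for document IDs
// "0".."19" and the second for IDs "20".."21"; when secondPage is 0 only the first collection is returned.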
/**
* Helper method that gets the expected {@link AnalyzeHealthcareEntitiesResultCollection} result.
*
* @param sizePerPage batch size per page.
* @param healthcareEntitiesResults a collection of {@link AnalyzeHealthcareEntitiesResult}.
*/
static AnalyzeHealthcareEntitiesResultCollection getExpectedAnalyzeHealthcareEntitiesResultCollection(
int sizePerPage, List<AnalyzeHealthcareEntitiesResult> healthcareEntitiesResults) {
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(
sizePerPage, sizePerPage, 0, sizePerPage);
final AnalyzeHealthcareEntitiesResultCollection analyzeHealthcareEntitiesResultCollection =
new AnalyzeHealthcareEntitiesResultCollection(IterableStream.of(healthcareEntitiesResults));
AnalyzeHealthcareEntitiesResultCollectionPropertiesHelper.setModelVersion(analyzeHealthcareEntitiesResultCollection, "2020-09-03");
AnalyzeHealthcareEntitiesResultCollectionPropertiesHelper.setStatistics(analyzeHealthcareEntitiesResultCollection,
textDocumentBatchStatistics);
return analyzeHealthcareEntitiesResultCollection;
}
/**
* Result for
* "The patient is a 54-year-old gentleman with a history of progressive angina over the past several months.",
*/
static AnalyzeHealthcareEntitiesResult getRecognizeHealthcareEntitiesResult1(String documentId) {
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(105, 1);
final HealthcareEntity healthcareEntity1 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity1, "54-year-old");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity1, HealthcareEntityCategory.AGE);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity1, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity1, 17);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity1, 11);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity1,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity2 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity2, "gentleman");
HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity2, "Male population group");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity2, HealthcareEntityCategory.GENDER);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity2, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity2, 29);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity2, 9);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity2,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity3 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity3, "progressive");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity3, HealthcareEntityCategory.fromString("Course"));
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity3, 0.91);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity3, 57);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity3, 11);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity3,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity4 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity4, "angina");
HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity4, "Angina Pectoris");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity4, HealthcareEntityCategory.SYMPTOM_OR_SIGN);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity4, 0.81);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity4, 69);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity4, 6);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity4,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity5 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity5, "past several months");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity5, HealthcareEntityCategory.TIME);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity5, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity5, 85);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity5, 19);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity5,
IterableStream.of(Collections.emptyList()));
final AnalyzeHealthcareEntitiesResult healthcareEntitiesResult1 = new AnalyzeHealthcareEntitiesResult(documentId,
textDocumentStatistics1, null);
AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntities(healthcareEntitiesResult1,
new IterableStream<>(asList(healthcareEntity1, healthcareEntity2, healthcareEntity3, healthcareEntity4,
healthcareEntity5)));
final HealthcareEntityRelation healthcareEntityRelation1 = new HealthcareEntityRelation();
final HealthcareEntityRelationRole role1 = new HealthcareEntityRelationRole();
HealthcareEntityRelationRolePropertiesHelper.setName(role1, "Course");
HealthcareEntityRelationRolePropertiesHelper.setEntity(role1, healthcareEntity3);
final HealthcareEntityRelationRole role2 = new HealthcareEntityRelationRole();
HealthcareEntityRelationRolePropertiesHelper.setName(role2, "Condition");
HealthcareEntityRelationRolePropertiesHelper.setEntity(role2, healthcareEntity4);
HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation1,
HealthcareEntityRelationType.fromString("CourseOfCondition"));
HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation1,
IterableStream.of(asList(role1, role2)));
final HealthcareEntityRelation healthcareEntityRelation2 = new HealthcareEntityRelation();
final HealthcareEntityRelationRole role3 = new HealthcareEntityRelationRole();
HealthcareEntityRelationRolePropertiesHelper.setName(role3, "Time");
HealthcareEntityRelationRolePropertiesHelper.setEntity(role3, healthcareEntity5);
HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation2,
HealthcareEntityRelationType.TIME_OF_CONDITION);
HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation2,
IterableStream.of(asList(role2, role3)));
AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntityRelations(healthcareEntitiesResult1,
IterableStream.of(asList(healthcareEntityRelation1, healthcareEntityRelation2)));
return healthcareEntitiesResult1;
}
/**
* Result for
* "The patient went for six minutes with minimal ST depressions in the anterior lateral leads ,
* thought due to fatigue and wrist pain , his anginal equivalent."
*/
/**
* RecognizeEntitiesResultCollection result for
* "I had a wonderful trip to Seattle last week."
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static RecognizeEntitiesResultCollection getRecognizeEntitiesResultCollection() {
return new RecognizeEntitiesResultCollection(
asList(new RecognizeEntitiesResult("0", new TextDocumentStatistics(44, 1), null,
new CategorizedEntityCollection(new IterableStream<>(getCategorizedEntitiesList1()), null)),
new RecognizeEntitiesResult("1", new TextDocumentStatistics(67, 1), null,
new CategorizedEntityCollection(new IterableStream<>(getCategorizedEntitiesForPiiInput()), null))
),
"2020-04-01",
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* RecognizePiiEntitiesResultCollection result for
* "I had a wonderful trip to Seattle last week."
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static RecognizePiiEntitiesResultCollection getRecognizePiiEntitiesResultCollection() {
final PiiEntity piiEntity0 = new PiiEntity();
PiiEntityPropertiesHelper.setText(piiEntity0, "last week");
PiiEntityPropertiesHelper.setCategory(piiEntity0, PiiEntityCategory.fromString("DateTime"));
PiiEntityPropertiesHelper.setSubcategory(piiEntity0, "DateRange");
PiiEntityPropertiesHelper.setOffset(piiEntity0, 34);
return new RecognizePiiEntitiesResultCollection(
asList(
new RecognizePiiEntitiesResult("0", new TextDocumentStatistics(44, 1), null,
new PiiEntityCollection(new IterableStream<>(Arrays.asList(piiEntity0)),
"I had a wonderful trip to Seattle *********.", null)),
new RecognizePiiEntitiesResult("1", new TextDocumentStatistics(67, 1), null,
new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList1()),
"********* ******** with ssn *********** is using our awesome API's.", null))),
"2020-07-01",
new TextDocumentBatchStatistics(2, 2, 0, 2)
);
}
/**
* ExtractKeyPhrasesResultCollection result for
* "I had a wonderful trip to Seattle last week."
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static ExtractKeyPhrasesResultCollection getExtractKeyPhrasesResultCollection() {
return new ExtractKeyPhrasesResultCollection(
asList(new ExtractKeyPhraseResult("0", new TextDocumentStatistics(44, 1),
null, new KeyPhrasesCollection(new IterableStream<>(asList("wonderful trip", "Seattle")), null)),
new ExtractKeyPhraseResult("1", new TextDocumentStatistics(67, 1),
null, new KeyPhrasesCollection(new IterableStream<>(asList("Microsoft employee", "ssn", "awesome API")), null))),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
static RecognizeLinkedEntitiesResultCollection getRecognizeLinkedEntitiesResultCollection() {
return new RecognizeLinkedEntitiesResultCollection(
asList(new RecognizeLinkedEntitiesResult("0", new TextDocumentStatistics(44, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList1()), null)),
new RecognizeLinkedEntitiesResult("1", new TextDocumentStatistics(20, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList2()), null))
),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
static RecognizeLinkedEntitiesResultCollection getRecognizeLinkedEntitiesResultCollectionForActions() {
return new RecognizeLinkedEntitiesResultCollection(
asList(new RecognizeLinkedEntitiesResult("0", new TextDocumentStatistics(44, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList1()), null)),
new RecognizeLinkedEntitiesResult("1", new TextDocumentStatistics(20, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList3()), null))
),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
static AnalyzeSentimentResultCollection getAnalyzeSentimentResultCollectionForActions() {
final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0",
null, null, getExpectedDocumentSentimentForActions());
final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1",
null, null, getExpectedDocumentSentimentForActions2());
return new AnalyzeSentimentResultCollection(
asList(analyzeSentimentResult1, analyzeSentimentResult2),
DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2));
}
static RecognizeEntitiesActionResult getExpectedRecognizeEntitiesActionResult(boolean isError, String actionName,
OffsetDateTime completeAt, RecognizeEntitiesResultCollection resultCollection, TextAnalyticsError actionError) {
RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection);
TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName);
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completeAt);
TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError);
TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError);
return actionResult;
}
static RecognizePiiEntitiesActionResult getExpectedRecognizePiiEntitiesActionResult(boolean isError,
String actionName, OffsetDateTime completedAt, RecognizePiiEntitiesResultCollection resultCollection,
TextAnalyticsError actionError) {
RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection);
TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName);
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completedAt);
TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError);
TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError);
return actionResult;
}
static ExtractKeyPhrasesActionResult getExpectedExtractKeyPhrasesActionResult(boolean isError, String actionName,
OffsetDateTime completedAt, ExtractKeyPhrasesResultCollection resultCollection,
TextAnalyticsError actionError) {
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection);
TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName);
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completedAt);
TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError);
TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError);
return actionResult;
}
static RecognizeLinkedEntitiesActionResult getExpectedRecognizeLinkedEntitiesActionResult(boolean isError,
String actionName, OffsetDateTime completeAt, RecognizeLinkedEntitiesResultCollection resultCollection,
TextAnalyticsError actionError) {
RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult();
RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection);
TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName);
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completeAt);
TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError);
TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError);
return actionResult;
}
static AnalyzeSentimentActionResult getExpectedAnalyzeSentimentActionResult(boolean isError, String actionName,
OffsetDateTime completeAt, AnalyzeSentimentResultCollection resultCollection, TextAnalyticsError actionError) {
AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult();
AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection);
TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName);
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completeAt);
TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError);
TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError);
return actionResult;
}
/**
* Helper method that gets the expected AnalyzeActionsResult result.
*/
static AnalyzeActionsResult getExpectedAnalyzeBatchActionsResult(
IterableStream<RecognizeEntitiesActionResult> recognizeEntitiesActionResults,
IterableStream<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults,
IterableStream<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults,
IterableStream<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults,
IterableStream<AnalyzeSentimentActionResult> analyzeSentimentActionResults) {
final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult();
AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult,
recognizeEntitiesActionResults);
AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult,
recognizePiiEntitiesActionResults);
AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult,
extractKeyPhrasesActionResults);
AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult,
recognizeLinkedEntitiesActionResults);
AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult,
analyzeSentimentActionResults);
return analyzeActionsResult;
}
/**
* CategorizedEntityCollection result for
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static RecognizeEntitiesResultCollection getRecognizeEntitiesResultCollectionForPagination(int startIndex,
int documentCount) {
List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>();
for (int i = startIndex; i < startIndex + documentCount; i++) {
recognizeEntitiesResults.add(new RecognizeEntitiesResult(Integer.toString(i), null, null,
new CategorizedEntityCollection(new IterableStream<>(getCategorizedEntitiesForPiiInput()), null)));
}
return new RecognizeEntitiesResultCollection(recognizeEntitiesResults, "2020-04-01",
new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount));
}
/**
* RecognizePiiEntitiesResultCollection result for
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static RecognizePiiEntitiesResultCollection getRecognizePiiEntitiesResultCollectionForPagination(int startIndex,
int documentCount) {
List<RecognizePiiEntitiesResult> recognizePiiEntitiesResults = new ArrayList<>();
for (int i = startIndex; i < startIndex + documentCount; i++) {
recognizePiiEntitiesResults.add(new RecognizePiiEntitiesResult(Integer.toString(i), null, null,
new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList1()),
"********* ******** with ssn *********** is using our awesome API's.", null)));
}
return new RecognizePiiEntitiesResultCollection(recognizePiiEntitiesResults, "2020-07-01",
new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount)
);
}
/**
* ExtractKeyPhrasesResultCollection result for
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static ExtractKeyPhrasesResultCollection getExtractKeyPhrasesResultCollectionForPagination(int startIndex,
int documentCount) {
List<ExtractKeyPhraseResult> extractKeyPhraseResults = new ArrayList<>();
for (int i = startIndex; i < startIndex + documentCount; i++) {
extractKeyPhraseResults.add(new ExtractKeyPhraseResult(Integer.toString(i), null, null,
new KeyPhrasesCollection(new IterableStream<>(asList("Microsoft employee", "ssn", "awesome API")),
null)));
}
return new ExtractKeyPhrasesResultCollection(extractKeyPhraseResults, "2020-07-01",
new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount));
}
/**
* RecognizeLinkedEntitiesResultCollection result for
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static RecognizeLinkedEntitiesResultCollection getRecognizeLinkedEntitiesResultCollectionForPagination(
int startIndex, int documentCount) {
List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults = new ArrayList<>();
for (int i = startIndex; i < startIndex + documentCount; i++) {
recognizeLinkedEntitiesResults.add(new RecognizeLinkedEntitiesResult(Integer.toString(i), null, null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList3()), null)));
}
return new RecognizeLinkedEntitiesResultCollection(recognizeLinkedEntitiesResults, "",
new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount)
);
}
/**
* AnalyzeSentimentResultCollection result for
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static AnalyzeSentimentResultCollection getAnalyzeSentimentResultCollectionForPagination(
int startIndex, int documentCount) {
List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>();
for (int i = startIndex; i < startIndex + documentCount; i++) {
analyzeSentimentResults.add(new AnalyzeSentimentResult(Integer.toString(i), null, null,
getExpectedDocumentSentimentForActions2()));
}
return new AnalyzeSentimentResultCollection(analyzeSentimentResults, "",
new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount)
);
}
/**
* Helper method that gets a multi-page (AnalyzeActionsResult) list.
*/
static List<AnalyzeActionsResult> getExpectedAnalyzeActionsResultListForMultiplePages(int startIndex,
int firstPage, int secondPage) {
List<AnalyzeActionsResult> analyzeActionsResults = new ArrayList<>();
analyzeActionsResults.add(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(
false, null, TIME_NOW, getRecognizeEntitiesResultCollectionForPagination(startIndex, firstPage), null))),
IterableStream.of(asList(getExpectedRecognizeLinkedEntitiesActionResult(
false, null, TIME_NOW, getRecognizeLinkedEntitiesResultCollectionForPagination(startIndex, firstPage), null))),
IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(
false, null, TIME_NOW, getRecognizePiiEntitiesResultCollectionForPagination(startIndex, firstPage), null))),
IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(
false, null, TIME_NOW, getExtractKeyPhrasesResultCollectionForPagination(startIndex, firstPage), null))),
IterableStream.of(asList(getExpectedAnalyzeSentimentActionResult(
false, null, TIME_NOW, getAnalyzeSentimentResultCollectionForPagination(startIndex, firstPage), null)))
));
startIndex += firstPage;
analyzeActionsResults.add(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(
false, null, TIME_NOW, getRecognizeEntitiesResultCollectionForPagination(startIndex, secondPage), null))),
IterableStream.of(asList(getExpectedRecognizeLinkedEntitiesActionResult(
false, null, TIME_NOW, getRecognizeLinkedEntitiesResultCollectionForPagination(startIndex, secondPage), null))),
IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(
false, null, TIME_NOW, getRecognizePiiEntitiesResultCollectionForPagination(startIndex, secondPage), null))),
IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(
false, null, TIME_NOW, getExtractKeyPhrasesResultCollectionForPagination(startIndex, secondPage), null))),
IterableStream.of(asList(getExpectedAnalyzeSentimentActionResult(
false, null, TIME_NOW, getAnalyzeSentimentResultCollectionForPagination(startIndex, secondPage), null)))
));
return analyzeActionsResults;
}
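// A worked example with illustrative (assumed) arguments: getExpectedAnalyzeActionsResultListForMultiplePages(0, 20, 2)
// builds two expected AnalyzeActionsResult pages; the first wraps one action result of each kind built over
// document IDs "0".."19", and the second (after startIndex is advanced by firstPage) covers IDs "20".."21".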
/**
* Helper method that gets a customized TextAnalyticsError.
*/
static TextAnalyticsError getActionError(TextAnalyticsErrorCode errorCode, String taskName, String index) {
return new TextAnalyticsError(errorCode, "", "
}
/**
* Returns a stream of arguments that includes all combinations of eligible {@link HttpClient HttpClients} and
* service versions that should be tested.
*
* @return A stream of HttpClient and service version combinations to test.
*/
static Stream<Arguments> getTestParameters() {
List<Arguments> argumentsList = new ArrayList<>();
getHttpClients()
.forEach(httpClient -> {
Arrays.stream(TextAnalyticsServiceVersion.values()).filter(
TestUtils::shouldServiceVersionBeTested)
.forEach(serviceVersion -> argumentsList.add(Arguments.of(httpClient, serviceVersion)));
});
return argumentsList.stream();
}
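// A minimal usage sketch (assumed consumption pattern, not taken from this file): test classes typically feed
// this method to JUnit 5 via @MethodSource, e.g.
//
//     @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
//     @MethodSource("getTestParameters")
//     public void someTest(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) { ... }
//
// so each test runs once per eligible HttpClient/service-version combination returned above.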
/**
* Returns whether the given service version matches the rules of the test framework.
*
* <ul>
* <li>The latest service version is used as the default if no environment variable is set.</li>
* <li>If it's set to ALL, all service versions in {@link TextAnalyticsServiceVersion} will be tested.</li>
* <li>Otherwise, the service version string should match the environment variable.</li>
* </ul>
*
* Environment values currently supported are: "ALL", "${version}".
* Use commas to separate the service versions to test,
* e.g. {@code set AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS = V1_0, V2_0}
*
* @param serviceVersion The service version to check.
* @return A boolean indicating whether the given service version should be tested.
*/
private static boolean shouldServiceVersionBeTested(TextAnalyticsServiceVersion serviceVersion) {
String serviceVersionFromEnv =
Configuration.getGlobalConfiguration().get(AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS);
if (CoreUtils.isNullOrEmpty(serviceVersionFromEnv)) {
return TextAnalyticsServiceVersion.getLatest().equals(serviceVersion);
}
if (AZURE_TEST_SERVICE_VERSIONS_VALUE_ALL.equalsIgnoreCase(serviceVersionFromEnv)) {
return true;
}
String[] configuredServiceVersionList = serviceVersionFromEnv.split(",");
return Arrays.stream(configuredServiceVersionList).anyMatch(configuredServiceVersion ->
serviceVersion.getVersion().equals(configuredServiceVersion.trim()));
}
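// For example: with AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS unset, only TextAnalyticsServiceVersion.getLatest()
// is tested; with it set to "ALL", every enum value is tested; otherwise each comma-separated entry is trimmed
// and compared against serviceVersion.getVersion(), and only matching versions are tested.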
private TestUtils() {
}
} | class TestUtils {
private static final String DEFAULT_MODEL_VERSION = "2019-10-01";
static final OffsetDateTime TIME_NOW = OffsetDateTime.now();
static final String INVALID_URL = "htttttttps:
static final String VALID_HTTPS_LOCALHOST = "https:
static final String FAKE_API_KEY = "1234567890";
static final String AZURE_TEXT_ANALYTICS_API_KEY = "AZURE_TEXT_ANALYTICS_API_KEY";
static final String CUSTOM_ACTION_NAME = "customActionName";
static final List<String> CUSTOM_ENTITIES_INPUT = asList(
"David Schmidt, senior vice president--Food Safety, International Food Information Council (IFIC), Washington,"
+ " D.C., discussed the physical activity component.");
static final List<String> CUSTOM_SINGLE_CLASSIFICATION = asList(
"A recent report by the Government Accountability Office (GAO) found that the dramatic increase in oil"
+ " and natural gas development on federal lands over the past six years has stretched the staff of "
+ "the BLM to a point that it has been unable to meet its environmental protection responsibilities.");
static final List<String> CUSTOM_MULTI_CLASSIFICATION = asList(
"I need a reservation for an indoor restaurant in China. Please don't stop the music. Play music and add"
+ " it to my playlist");
static final List<String> SUMMARY_INPUTS = asList(
"At Microsoft, we have been on a quest to advance AI beyond existing techniques, by taking a more holistic,"
+ " human-centric approach to learning and understanding. As Chief Technology Officer of Azure AI "
+ "Cognitive Services, I have been working with a team of amazing scientists and engineers to turn this"
+ " quest into a reality. In my role, I enjoy a unique perspective in viewing the relationship among "
+ "three attributes of human cognition: monolingual text (X), audio or visual sensory signals, (Y) and"
+ " multilingual (Z). At the intersection of all three, there’s magic—what we call XYZ-code as"
+ " illustrated in Figure 1—a joint representation to create more powerful AI that can speak, hear, see,"
+ " and understand humans better. We believe XYZ-code will enable us to fulfill our long-term vision:"
+ " cross-domain transfer learning, spanning modalities and languages. The goal is to have pretrained"
+ " models that can jointly learn representations to support a broad range of downstream AI tasks, much"
+ " in the way humans do today. Over the past five years, we have achieved human performance on benchmarks"
+ " in conversational speech recognition, machine translation, conversational question answering, machine"
+ " reading comprehension, and image captioning. These five breakthroughs provided us with strong signals"
+ " toward our more ambitious aspiration to produce a leap in AI capabilities, achieving multisensory and"
+ " multilingual learning that is closer in line with how humans learn and understand. I believe the joint"
+ " XYZ-code is a foundational component of this aspiration, if grounded with external knowledge sources"
+ " in the downstream AI tasks."
);
static final List<String> SENTIMENT_INPUTS = asList(
"The hotel was dark and unclean. The restaurant had amazing gnocchi.",
"The restaurant had amazing gnocchi. The hotel was dark and unclean.");
static final List<String> CATEGORIZED_ENTITY_INPUTS = asList(
"I had a wonderful trip to Seattle last week.", "I work at Microsoft.");
static final List<String> PII_ENTITY_INPUTS = asList(
"Microsoft employee with ssn 859-98-0987 is using our awesome API's.",
"Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check.");
static final List<String> LINKED_ENTITY_INPUTS = asList(
"I had a wonderful trip to Seattle last week.",
"I work at Microsoft.");
static final List<String> KEY_PHRASE_INPUTS = asList(
"Hello world. This is some input text that I love.",
"Bonjour tout le monde");
static final String TOO_LONG_INPUT = "Thisisaveryveryverylongtextwhichgoesonforalongtimeandwhichalmostdoesn'tseemtostopatanygivenpointintime.ThereasonforthistestistotryandseewhathappenswhenwesubmitaveryveryverylongtexttoLanguage.Thisshouldworkjustfinebutjustincaseitisalwaysgoodtohaveatestcase.ThisallowsustotestwhathappensifitisnotOK.Ofcourseitisgoingtobeokbutthenagainitisalsobettertobesure!";
static final List<String> KEY_PHRASE_FRENCH_INPUTS = asList(
"Bonjour tout le monde.",
"Je m'appelle Mondly.");
static final List<String> DETECT_LANGUAGE_INPUTS = asList(
"This is written in English", "Este es un documento escrito en Español.", "~@!~:)");
static final String PII_ENTITY_OFFSET_INPUT = "SSN: 859-98-0987";
static final String SENTIMENT_OFFSET_INPUT = "The hotel was unclean.";
static final String HEALTHCARE_ENTITY_OFFSET_INPUT = "The patient is a 54-year-old";
static final List<String> HEALTHCARE_INPUTS = asList(
"The patient is a 54-year-old gentleman with a history of progressive angina over the past several months.",
"The patient went for six minutes with minimal ST depressions in the anterior lateral leads , thought due to fatigue and wrist pain , his anginal equivalent.");
static final List<String> SPANISH_SAME_AS_ENGLISH_INPUTS = asList("personal", "social");
static final DetectedLanguage DETECTED_LANGUAGE_SPANISH = new DetectedLanguage("Spanish", "es", 1.0, null);
static final DetectedLanguage DETECTED_LANGUAGE_ENGLISH = new DetectedLanguage("English", "en", 1.0, null);
static final List<DetectedLanguage> DETECT_SPANISH_LANGUAGE_RESULTS = asList(
DETECTED_LANGUAGE_SPANISH, DETECTED_LANGUAGE_SPANISH);
static final List<DetectedLanguage> DETECT_ENGLISH_LANGUAGE_RESULTS = asList(
DETECTED_LANGUAGE_ENGLISH, DETECTED_LANGUAGE_ENGLISH);
static final HttpResponseException HTTP_RESPONSE_EXCEPTION_CLASS = new HttpResponseException("", null);
static final String DISPLAY_NAME_WITH_ARGUMENTS = "{displayName} with [{arguments}]";
private static final String AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS =
"AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS";
static List<DetectLanguageInput> getDetectLanguageInputs() {
return asList(
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"),
new DetectLanguageInput("1", DETECT_LANGUAGE_INPUTS.get(1), "US"),
new DetectLanguageInput("2", DETECT_LANGUAGE_INPUTS.get(2), "US")
);
}
static List<DetectLanguageInput> getDuplicateIdDetectLanguageInputs() {
return asList(
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US"),
new DetectLanguageInput("0", DETECT_LANGUAGE_INPUTS.get(0), "US")
);
}
static List<TextDocumentInput> getDuplicateTextDocumentInputs() {
return asList(
new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0)),
new TextDocumentInput("0", CATEGORIZED_ENTITY_INPUTS.get(0))
);
}
static List<TextDocumentInput> getWarningsTextDocumentInputs() {
return asList(
new TextDocumentInput("0", TOO_LONG_INPUT),
new TextDocumentInput("1", CATEGORIZED_ENTITY_INPUTS.get(1))
);
}
static List<TextDocumentInput> getTextDocumentInputs(List<String> inputs) {
return IntStream.range(0, inputs.size())
.mapToObj(index ->
new TextDocumentInput(String.valueOf(index), inputs.get(index)))
.collect(Collectors.toList());
}
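// For example, getTextDocumentInputs(asList("first doc", "second doc")) yields
// [TextDocumentInput("0", "first doc"), TextDocumentInput("1", "second doc")]: document IDs are simply the
// element indices rendered as strings.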
/**
* Helper method to get the expected Batch Detected Languages
*
* @return A {@link DetectLanguageResultCollection}.
*/
static DetectLanguageResultCollection getExpectedBatchDetectedLanguages() {
final TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(3, 3, 0, 3);
final List<DetectLanguageResult> detectLanguageResultList = asList(
new DetectLanguageResult("0", new TextDocumentStatistics(26, 1), null, getDetectedLanguageEnglish()),
new DetectLanguageResult("1", new TextDocumentStatistics(40, 1), null, getDetectedLanguageSpanish()),
new DetectLanguageResult("2", new TextDocumentStatistics(6, 1), null, getUnknownDetectedLanguage()));
return new DetectLanguageResultCollection(detectLanguageResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
static DetectedLanguage getDetectedLanguageEnglish() {
return new DetectedLanguage("English", "en", 0.0, null);
}
static DetectedLanguage getDetectedLanguageSpanish() {
return new DetectedLanguage("Spanish", "es", 0.0, null);
}
static DetectedLanguage getUnknownDetectedLanguage() {
return new DetectedLanguage("(Unknown)", "(Unknown)", 0.0, null);
}
/**
* Helper method to get the expected Batch Categorized Entities
*
* @return A {@link RecognizeEntitiesResultCollection}.
*/
static RecognizeEntitiesResultCollection getExpectedBatchCategorizedEntities() {
return new RecognizeEntitiesResultCollection(
asList(getExpectedBatchCategorizedEntities1(), getExpectedBatchCategorizedEntities2()),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* Helper method to get the expected Categorized Entities List 1
*/
static List<CategorizedEntity> getCategorizedEntitiesList1() {
CategorizedEntity categorizedEntity1 = new CategorizedEntity("trip", EntityCategory.EVENT, null, 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity1, 18);
CategorizedEntity categorizedEntity2 = new CategorizedEntity("Seattle", EntityCategory.LOCATION, "GPE", 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity2, 26);
CategorizedEntity categorizedEntity3 = new CategorizedEntity("last week", EntityCategory.DATE_TIME, "DateRange", 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity3, 34);
return asList(categorizedEntity1, categorizedEntity2, categorizedEntity3);
}
/**
* Helper method to get the expected Categorized Entities List 2
*/
static List<CategorizedEntity> getCategorizedEntitiesList2() {
CategorizedEntity categorizedEntity1 = new CategorizedEntity("Microsoft", EntityCategory.ORGANIZATION, null, 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity1, 10);
return asList(categorizedEntity1);
}
/**
* Helper method to get the expected Categorized entity result for PII document input.
*/
static List<CategorizedEntity> getCategorizedEntitiesForPiiInput() {
CategorizedEntity categorizedEntity1 = new CategorizedEntity("Microsoft", EntityCategory.ORGANIZATION, null, 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity1, 0);
CategorizedEntity categorizedEntity2 = new CategorizedEntity("employee", EntityCategory.PERSON_TYPE, null, 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity2, 10);
CategorizedEntity categorizedEntity3 = new CategorizedEntity("859", EntityCategory.QUANTITY, "Number", 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity3, 28);
CategorizedEntity categorizedEntity4 = new CategorizedEntity("98", EntityCategory.QUANTITY, "Number", 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity4, 32);
CategorizedEntity categorizedEntity5 = new CategorizedEntity("0987", EntityCategory.QUANTITY, "Number", 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity5, 35);
CategorizedEntity categorizedEntity6 = new CategorizedEntity("API", EntityCategory.SKILL, null, 0.0);
CategorizedEntityPropertiesHelper.setOffset(categorizedEntity6, 61);
return asList(categorizedEntity1, categorizedEntity2, categorizedEntity3, categorizedEntity4, categorizedEntity5, categorizedEntity6);
}
/**
* Helper method to get the expected Batch Categorized Entities
*/
static RecognizeEntitiesResult getExpectedBatchCategorizedEntities1() {
IterableStream<CategorizedEntity> categorizedEntityList1 = new IterableStream<>(getCategorizedEntitiesList1());
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(44, 1);
RecognizeEntitiesResult recognizeEntitiesResult1 = new RecognizeEntitiesResult("0", textDocumentStatistics1, null, new CategorizedEntityCollection(categorizedEntityList1, null));
return recognizeEntitiesResult1;
}
/**
* Helper method to get the expected Batch Categorized Entities
*/
static RecognizeEntitiesResult getExpectedBatchCategorizedEntities2() {
IterableStream<CategorizedEntity> categorizedEntityList2 = new IterableStream<>(getCategorizedEntitiesList2());
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(20, 1);
RecognizeEntitiesResult recognizeEntitiesResult2 = new RecognizeEntitiesResult("1", textDocumentStatistics2, null, new CategorizedEntityCollection(categorizedEntityList2, null));
return recognizeEntitiesResult2;
}
/**
* Helper method to get the expected batch of Personally Identifiable Information entities
*/
static RecognizePiiEntitiesResultCollection getExpectedBatchPiiEntities() {
PiiEntityCollection piiEntityCollection = new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList1()),
"********* ******** with ssn *********** is using our awesome API's.", null);
PiiEntityCollection piiEntityCollection2 = new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList2()),
"Your ABA number - ********* - is the first 9 digits in the lower left hand corner of your personal check.", null);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1);
RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, piiEntityCollection);
RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, piiEntityCollection2);
return new RecognizePiiEntitiesResultCollection(
asList(recognizeEntitiesResult1, recognizeEntitiesResult2),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* Helper method to get the expected batch of Personally Identifiable Information entities for domain filter
*/
static RecognizePiiEntitiesResultCollection getExpectedBatchPiiEntitiesForDomainFilter() {
PiiEntityCollection piiEntityCollection = new PiiEntityCollection(
new IterableStream<>(getPiiEntitiesList1ForDomainFilter()),
"********* employee with ssn *********** is using our awesome API's.", null);
PiiEntityCollection piiEntityCollection2 = new PiiEntityCollection(
new IterableStream<>(Arrays.asList(getPiiEntitiesList2().get(0), getPiiEntitiesList2().get(1), getPiiEntitiesList2().get(2))),
"Your ABA number - ********* - is the first 9 digits in the lower left hand corner of your personal check.", null);
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(67, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(105, 1);
RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", textDocumentStatistics1, null, piiEntityCollection);
RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", textDocumentStatistics2, null, piiEntityCollection2);
return new RecognizePiiEntitiesResultCollection(
asList(recognizeEntitiesResult1, recognizeEntitiesResult2),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* Helper method to get the expected PII Entities List 1
*/
static List<PiiEntity> getPiiEntitiesList1() {
final PiiEntity piiEntity0 = new PiiEntity();
PiiEntityPropertiesHelper.setText(piiEntity0, "Microsoft");
PiiEntityPropertiesHelper.setCategory(piiEntity0, PiiEntityCategory.ORGANIZATION);
PiiEntityPropertiesHelper.setSubcategory(piiEntity0, null);
PiiEntityPropertiesHelper.setOffset(piiEntity0, 0);
final PiiEntity piiEntity1 = new PiiEntity();
PiiEntityPropertiesHelper.setText(piiEntity1, "employee");
PiiEntityPropertiesHelper.setCategory(piiEntity1, PiiEntityCategory.fromString("PersonType"));
PiiEntityPropertiesHelper.setSubcategory(piiEntity1, null);
PiiEntityPropertiesHelper.setOffset(piiEntity1, 10);
final PiiEntity piiEntity2 = new PiiEntity();
PiiEntityPropertiesHelper.setText(piiEntity2, "859-98-0987");
PiiEntityPropertiesHelper.setCategory(piiEntity2, PiiEntityCategory.US_SOCIAL_SECURITY_NUMBER);
PiiEntityPropertiesHelper.setSubcategory(piiEntity2, null);
PiiEntityPropertiesHelper.setOffset(piiEntity2, 28);
return asList(piiEntity0, piiEntity1, piiEntity2);
}
static List<PiiEntity> getPiiEntitiesList1ForDomainFilter() {
return Arrays.asList(getPiiEntitiesList1().get(0), getPiiEntitiesList1().get(2));
}
/**
* Helper method to get the expected PII Entities List 2
*/
static List<PiiEntity> getPiiEntitiesList2() {
String expectedText = "111000025";
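// The same text span ("111000025") is reported under several candidate categories below,
// each with its own confidence score.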
final PiiEntity piiEntity0 = new PiiEntity();
PiiEntityPropertiesHelper.setText(piiEntity0, expectedText);
PiiEntityPropertiesHelper.setCategory(piiEntity0, PiiEntityCategory.PHONE_NUMBER);
PiiEntityPropertiesHelper.setSubcategory(piiEntity0, null);
PiiEntityPropertiesHelper.setConfidenceScore(piiEntity0, 0.8);
PiiEntityPropertiesHelper.setOffset(piiEntity0, 18);
final PiiEntity piiEntity1 = new PiiEntity();
PiiEntityPropertiesHelper.setText(piiEntity1, expectedText);
PiiEntityPropertiesHelper.setCategory(piiEntity1, PiiEntityCategory.ABA_ROUTING_NUMBER);
PiiEntityPropertiesHelper.setSubcategory(piiEntity1, null);
PiiEntityPropertiesHelper.setConfidenceScore(piiEntity1, 0.75);
PiiEntityPropertiesHelper.setOffset(piiEntity1, 18);
final PiiEntity piiEntity2 = new PiiEntity();
PiiEntityPropertiesHelper.setText(piiEntity2, expectedText);
PiiEntityPropertiesHelper.setCategory(piiEntity2, PiiEntityCategory.NZ_SOCIAL_WELFARE_NUMBER);
PiiEntityPropertiesHelper.setSubcategory(piiEntity2, null);
PiiEntityPropertiesHelper.setConfidenceScore(piiEntity2, 0.65);
PiiEntityPropertiesHelper.setOffset(piiEntity2, 18);
return asList(piiEntity0, piiEntity1, piiEntity2);
}
/**
* Helper method to get the expected batch of Personally Identifiable Information entities for categories filter
*/
static RecognizePiiEntitiesResultCollection getExpectedBatchPiiEntitiesForCategoriesFilter() {
PiiEntityCollection piiEntityCollection = new PiiEntityCollection(
new IterableStream<>(asList(getPiiEntitiesList1().get(2))),
"Microsoft employee with ssn *********** is using our awesome API's.", null);
PiiEntityCollection piiEntityCollection2 = new PiiEntityCollection(
new IterableStream<>(asList(getPiiEntitiesList2().get(1))),
"Your ABA number - ********* - is the first 9 digits in the lower left hand corner of your personal check.", null);
RecognizePiiEntitiesResult recognizeEntitiesResult1 = new RecognizePiiEntitiesResult("0", null, null, piiEntityCollection);
RecognizePiiEntitiesResult recognizeEntitiesResult2 = new RecognizePiiEntitiesResult("1", null, null, piiEntityCollection2);
return new RecognizePiiEntitiesResultCollection(
asList(recognizeEntitiesResult1, recognizeEntitiesResult2),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* Helper method to get the expected Batch Linked Entities
* @return A {@link RecognizeLinkedEntitiesResultCollection}.
*/
static RecognizeLinkedEntitiesResultCollection getExpectedBatchLinkedEntities() {
final TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
final List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResultList =
asList(
new RecognizeLinkedEntitiesResult(
"0", new TextDocumentStatistics(44, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList1()), null)),
new RecognizeLinkedEntitiesResult(
"1", new TextDocumentStatistics(20, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList2()), null)));
return new RecognizeLinkedEntitiesResultCollection(recognizeLinkedEntitiesResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected linked Entities List 1
*/
static List<LinkedEntity> getLinkedEntitiesList1() {
final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Seattle", 0.0);
LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch, 26);
LinkedEntity linkedEntity = new LinkedEntity(
"Seattle", new IterableStream<>(Collections.singletonList(linkedEntityMatch)),
"en", "Seattle", "https:
"Wikipedia");
LinkedEntityPropertiesHelper.setBingEntitySearchApiId(linkedEntity, "5fbba6b8-85e1-4d41-9444-d9055436e473");
return asList(linkedEntity);
}
/**
* Helper method to get the expected linked Entities List 2
*/
static List<LinkedEntity> getLinkedEntitiesList2() {
LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Microsoft", 0.0);
LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch, 10);
LinkedEntity linkedEntity = new LinkedEntity(
"Microsoft", new IterableStream<>(Collections.singletonList(linkedEntityMatch)),
"en", "Microsoft", "https:
"Wikipedia");
LinkedEntityPropertiesHelper.setBingEntitySearchApiId(linkedEntity, "a093e9b9-90f5-a3d5-c4b8-5855e1b01f85");
return asList(linkedEntity);
}
static List<LinkedEntity> getLinkedEntitiesList3() {
LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch("Microsoft", 0.0);
LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch, 0);
LinkedEntityMatch linkedEntityMatch1 = new LinkedEntityMatch("API's", 0.0);
LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch1, 61);
LinkedEntity linkedEntity = new LinkedEntity(
"Microsoft", new IterableStream<>(Collections.singletonList(linkedEntityMatch)),
"en", "Microsoft", "https:
"Wikipedia");
LinkedEntityPropertiesHelper.setBingEntitySearchApiId(linkedEntity, "a093e9b9-90f5-a3d5-c4b8-5855e1b01f85");
LinkedEntity linkedEntity1 = new LinkedEntity(
"Application programming interface", new IterableStream<>(Collections.singletonList(linkedEntityMatch1)),
"en", "Application programming interface",
"https:
"Wikipedia");
return asList(linkedEntity, linkedEntity1);
}
/**
* Helper method to get the expected Batch Key Phrases.
*/
static ExtractKeyPhrasesResultCollection getExpectedBatchKeyPhrases() {
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(49, 1);
TextDocumentStatistics textDocumentStatistics2 = new TextDocumentStatistics(21, 1);
ExtractKeyPhraseResult extractKeyPhraseResult1 = new ExtractKeyPhraseResult("0", textDocumentStatistics1, null, new KeyPhrasesCollection(new IterableStream<>(asList("Hello world", "input text")), null));
ExtractKeyPhraseResult extractKeyPhraseResult2 = new ExtractKeyPhraseResult("1", textDocumentStatistics2, null, new KeyPhrasesCollection(new IterableStream<>(asList("Bonjour", "monde")), null));
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(2, 2, 0, 2);
List<ExtractKeyPhraseResult> extractKeyPhraseResultList = asList(extractKeyPhraseResult1, extractKeyPhraseResult2);
return new ExtractKeyPhrasesResultCollection(extractKeyPhraseResultList, DEFAULT_MODEL_VERSION, textDocumentBatchStatistics);
}
/**
* Helper method to get the expected Batch Text Sentiments
*/
static AnalyzeSentimentResultCollection getExpectedBatchTextSentiment() {
final TextDocumentStatistics textDocumentStatistics = new TextDocumentStatistics(67, 1);
final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0",
textDocumentStatistics, null, getExpectedDocumentSentiment());
final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1",
textDocumentStatistics, null, getExpectedDocumentSentiment2());
return new AnalyzeSentimentResultCollection(
asList(analyzeSentimentResult1, analyzeSentimentResult2),
DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* Helper method that gets the first expected DocumentSentiment result.
*/
static DocumentSentiment getExpectedDocumentSentiment() {
final AssessmentSentiment assessmentSentiment1 = new AssessmentSentiment();
AssessmentSentimentPropertiesHelper.setText(assessmentSentiment1, "dark");
AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment1, TextSentiment.NEGATIVE);
AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment1,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment1, false);
AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment1, 14);
AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment1, 0);
final AssessmentSentiment assessmentSentiment2 = new AssessmentSentiment();
AssessmentSentimentPropertiesHelper.setText(assessmentSentiment2, "unclean");
AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment2, TextSentiment.NEGATIVE);
AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment2,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment2, false);
AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment2, 23);
AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment2, 0);
final AssessmentSentiment assessmentSentiment3 = new AssessmentSentiment();
AssessmentSentimentPropertiesHelper.setText(assessmentSentiment3, "amazing");
AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment3, TextSentiment.POSITIVE);
AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment3,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment3, false);
AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment3, 51);
AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment3, 0);
final TargetSentiment targetSentiment1 = new TargetSentiment();
TargetSentimentPropertiesHelper.setText(targetSentiment1, "hotel");
TargetSentimentPropertiesHelper.setSentiment(targetSentiment1, TextSentiment.NEGATIVE);
TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment1,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
TargetSentimentPropertiesHelper.setOffset(targetSentiment1, 4);
final SentenceOpinion sentenceOpinion1 = new SentenceOpinion();
SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion1, targetSentiment1);
SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion1,
new IterableStream<>(asList(assessmentSentiment1, assessmentSentiment2)));
final TargetSentiment targetSentiment2 = new TargetSentiment();
TargetSentimentPropertiesHelper.setText(targetSentiment2, "gnocchi");
TargetSentimentPropertiesHelper.setSentiment(targetSentiment2, TextSentiment.POSITIVE);
TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment2,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
TargetSentimentPropertiesHelper.setOffset(targetSentiment2, 59);
final SentenceOpinion sentenceOpinion2 = new SentenceOpinion();
SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion2, targetSentiment2);
SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion2,
new IterableStream<>(asList(assessmentSentiment3)));
final SentenceSentiment sentenceSentiment1 = new SentenceSentiment(
"The hotel was dark and unclean.", TextSentiment.NEGATIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, new IterableStream<>(asList(sentenceOpinion1)));
SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0);
SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 31);
final SentenceSentiment sentenceSentiment2 = new SentenceSentiment(
"The restaurant had amazing gnocchi.", TextSentiment.POSITIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment2, new IterableStream<>(asList(sentenceOpinion2)));
SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment2, 32);
SentenceSentimentPropertiesHelper.setLength(sentenceSentiment2, 35);
return new DocumentSentiment(TextSentiment.MIXED,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(asList(sentenceSentiment1, sentenceSentiment2)),
null);
}
/**
* Helper method that gets the second expected DocumentSentiment result.
*/
static DocumentSentiment getExpectedDocumentSentiment2() {
final AssessmentSentiment assessmentSentiment1 = new AssessmentSentiment();
AssessmentSentimentPropertiesHelper.setText(assessmentSentiment1, "dark");
AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment1, TextSentiment.NEGATIVE);
AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment1,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment1, false);
AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment1, 50);
AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment1, 0);
final AssessmentSentiment assessmentSentiment2 = new AssessmentSentiment();
AssessmentSentimentPropertiesHelper.setText(assessmentSentiment2, "unclean");
AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment2, TextSentiment.NEGATIVE);
AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment2,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment2, false);
AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment2, 59);
AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment2, 0);
final AssessmentSentiment assessmentSentiment3 = new AssessmentSentiment();
AssessmentSentimentPropertiesHelper.setText(assessmentSentiment3, "amazing");
AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment3, TextSentiment.POSITIVE);
AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment3,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment3, false);
AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment3, 19);
AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment3, 0);
final TargetSentiment targetSentiment1 = new TargetSentiment();
TargetSentimentPropertiesHelper.setText(targetSentiment1, "gnocchi");
TargetSentimentPropertiesHelper.setSentiment(targetSentiment1, TextSentiment.POSITIVE);
TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment1,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
TargetSentimentPropertiesHelper.setOffset(targetSentiment1, 27);
final SentenceOpinion sentenceOpinion1 = new SentenceOpinion();
SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion1, targetSentiment1);
SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion1,
new IterableStream<>(asList(assessmentSentiment3)));
final TargetSentiment targetSentiment2 = new TargetSentiment();
TargetSentimentPropertiesHelper.setText(targetSentiment2, "hotel");
TargetSentimentPropertiesHelper.setSentiment(targetSentiment2, TextSentiment.NEGATIVE);
TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment2,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
TargetSentimentPropertiesHelper.setOffset(targetSentiment2, 40);
final SentenceOpinion sentenceOpinion2 = new SentenceOpinion();
SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion2, targetSentiment2);
SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion2,
new IterableStream<>(asList(assessmentSentiment1, assessmentSentiment2)));
final SentenceSentiment sentenceSentiment1 = new SentenceSentiment(
"The restaurant had amazing gnocchi.", TextSentiment.POSITIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, new IterableStream<>(asList(sentenceOpinion1)));
SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0);
SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 35);
final SentenceSentiment sentenceSentiment2 = new SentenceSentiment(
"The hotel was dark and unclean.", TextSentiment.NEGATIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment2, new IterableStream<>(asList(sentenceOpinion2)));
SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment2, 36);
SentenceSentimentPropertiesHelper.setLength(sentenceSentiment2, 31);
return new DocumentSentiment(TextSentiment.MIXED,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(asList(sentenceSentiment1, sentenceSentiment2)),
null);
}
/*
* This is the expected result for testing an input:
* "I had a wonderful trip to Seattle last week."
*/
static DocumentSentiment getExpectedDocumentSentimentForActions() {
final SentenceSentiment sentenceSentiment1 = new SentenceSentiment(
"I had a wonderful trip to Seattle last week.", TextSentiment.POSITIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, null);
SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0);
SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 44);
return new DocumentSentiment(TextSentiment.POSITIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(asList(sentenceSentiment1)),
null);
}
/*
* This is the expected result for testing an input:
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static DocumentSentiment getExpectedDocumentSentimentForActions2() {
final SentenceSentiment sentenceSentiment1 = new SentenceSentiment(
"Microsoft employee with ssn 859-98-0987 is using our awesome API's.", TextSentiment.POSITIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0));
SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, null);
SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, 0);
SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, 67);
return new DocumentSentiment(TextSentiment.POSITIVE,
new SentimentConfidenceScores(0.0, 0.0, 0.0),
new IterableStream<>(asList(sentenceSentiment1)),
null);
}
/**
* Helper method that gets a single-page {@link AnalyzeHealthcareEntitiesResultCollection} list.
*/
static List<AnalyzeHealthcareEntitiesResultCollection>
getExpectedAnalyzeHealthcareEntitiesResultCollectionListForSinglePage() {
return asList(
getExpectedAnalyzeHealthcareEntitiesResultCollection(2,
asList(getRecognizeHealthcareEntitiesResult1("0"), getRecognizeHealthcareEntitiesResult2())));
}
/**
* Helper method that gets a multi-page {@link AnalyzeHealthcareEntitiesResultCollection} list.
*/
static List<AnalyzeHealthcareEntitiesResultCollection>
getExpectedAnalyzeHealthcareEntitiesResultCollectionListForMultiplePages(int startIndex, int firstPage,
int secondPage) {
List<AnalyzeHealthcareEntitiesResult> healthcareEntitiesResults1 = new ArrayList<>();
int i = startIndex;
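// Expected results for the documents on the first page: ids startIndex to startIndex + firstPage - 1.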
for (; i < startIndex + firstPage; i++) {
healthcareEntitiesResults1.add(getRecognizeHealthcareEntitiesResult1(Integer.toString(i)));
}
List<AnalyzeHealthcareEntitiesResult> healthcareEntitiesResults2 = new ArrayList<>();
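// Expected results for the documents on the second page, with ids continuing after the first page.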
for (; i < startIndex + firstPage + secondPage; i++) {
healthcareEntitiesResults2.add(getRecognizeHealthcareEntitiesResult1(Integer.toString(i)));
}
List<AnalyzeHealthcareEntitiesResultCollection> result = new ArrayList<>();
result.add(getExpectedAnalyzeHealthcareEntitiesResultCollection(firstPage, healthcareEntitiesResults1));
if (secondPage != 0) {
result.add(getExpectedAnalyzeHealthcareEntitiesResultCollection(secondPage, healthcareEntitiesResults2));
}
return result;
}
/**
* Helper method that gets the expected {@link AnalyzeHealthcareEntitiesResultCollection} result.
*
* @param sizePerPage batch size per page.
* @param healthcareEntitiesResults a collection of {@link AnalyzeHealthcareEntitiesResult}.
*/
static AnalyzeHealthcareEntitiesResultCollection getExpectedAnalyzeHealthcareEntitiesResultCollection(
int sizePerPage, List<AnalyzeHealthcareEntitiesResult> healthcareEntitiesResults) {
TextDocumentBatchStatistics textDocumentBatchStatistics = new TextDocumentBatchStatistics(
sizePerPage, sizePerPage, 0, sizePerPage);
final AnalyzeHealthcareEntitiesResultCollection analyzeHealthcareEntitiesResultCollection =
new AnalyzeHealthcareEntitiesResultCollection(IterableStream.of(healthcareEntitiesResults));
AnalyzeHealthcareEntitiesResultCollectionPropertiesHelper.setModelVersion(analyzeHealthcareEntitiesResultCollection, "2020-09-03");
AnalyzeHealthcareEntitiesResultCollectionPropertiesHelper.setStatistics(analyzeHealthcareEntitiesResultCollection,
textDocumentBatchStatistics);
return analyzeHealthcareEntitiesResultCollection;
}
/**
* Result for
* "The patient is a 54-year-old gentleman with a history of progressive angina over the past several months.",
*/
static AnalyzeHealthcareEntitiesResult getRecognizeHealthcareEntitiesResult1(String documentId) {
TextDocumentStatistics textDocumentStatistics1 = new TextDocumentStatistics(105, 1);
final HealthcareEntity healthcareEntity1 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity1, "54-year-old");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity1, HealthcareEntityCategory.AGE);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity1, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity1, 17);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity1, 11);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity1,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity2 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity2, "gentleman");
HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity2, "Male population group");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity2, HealthcareEntityCategory.GENDER);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity2, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity2, 29);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity2, 9);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity2,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity3 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity3, "progressive");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity3, HealthcareEntityCategory.fromString("Course"));
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity3, 0.91);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity3, 57);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity3, 11);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity3,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity4 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity4, "angina");
HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity4, "Angina Pectoris");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity4, HealthcareEntityCategory.SYMPTOM_OR_SIGN);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity4, 0.81);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity4, 69);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity4, 6);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity4,
IterableStream.of(Collections.emptyList()));
final HealthcareEntity healthcareEntity5 = new HealthcareEntity();
HealthcareEntityPropertiesHelper.setText(healthcareEntity5, "past several months");
HealthcareEntityPropertiesHelper.setCategory(healthcareEntity5, HealthcareEntityCategory.TIME);
HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity5, 1.0);
HealthcareEntityPropertiesHelper.setOffset(healthcareEntity5, 85);
HealthcareEntityPropertiesHelper.setLength(healthcareEntity5, 19);
HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity5,
IterableStream.of(Collections.emptyList()));
final AnalyzeHealthcareEntitiesResult healthcareEntitiesResult1 = new AnalyzeHealthcareEntitiesResult(documentId,
textDocumentStatistics1, null);
AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntities(healthcareEntitiesResult1,
new IterableStream<>(asList(healthcareEntity1, healthcareEntity2, healthcareEntity3, healthcareEntity4,
healthcareEntity5)));
final HealthcareEntityRelation healthcareEntityRelation1 = new HealthcareEntityRelation();
final HealthcareEntityRelationRole role1 = new HealthcareEntityRelationRole();
HealthcareEntityRelationRolePropertiesHelper.setName(role1, "Course");
HealthcareEntityRelationRolePropertiesHelper.setEntity(role1, healthcareEntity3);
final HealthcareEntityRelationRole role2 = new HealthcareEntityRelationRole();
HealthcareEntityRelationRolePropertiesHelper.setName(role2, "Condition");
HealthcareEntityRelationRolePropertiesHelper.setEntity(role2, healthcareEntity4);
HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation1,
HealthcareEntityRelationType.fromString("CourseOfCondition"));
HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation1,
IterableStream.of(asList(role1, role2)));
final HealthcareEntityRelation healthcareEntityRelation2 = new HealthcareEntityRelation();
final HealthcareEntityRelationRole role3 = new HealthcareEntityRelationRole();
HealthcareEntityRelationRolePropertiesHelper.setName(role3, "Time");
HealthcareEntityRelationRolePropertiesHelper.setEntity(role3, healthcareEntity5);
HealthcareEntityRelationPropertiesHelper.setRelationType(healthcareEntityRelation2,
HealthcareEntityRelationType.TIME_OF_CONDITION);
HealthcareEntityRelationPropertiesHelper.setRoles(healthcareEntityRelation2,
IterableStream.of(asList(role2, role3)));
AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntityRelations(healthcareEntitiesResult1,
IterableStream.of(asList(healthcareEntityRelation1, healthcareEntityRelation2)));
return healthcareEntitiesResult1;
}
/**
* Result for
* "The patient went for six minutes with minimal ST depressions in the anterior lateral leads ,
* thought due to fatigue and wrist pain , his anginal equivalent."
*/
/**
* RecognizeEntitiesResultCollection result for
* "I had a wonderful trip to Seattle last week."
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static RecognizeEntitiesResultCollection getRecognizeEntitiesResultCollection() {
return new RecognizeEntitiesResultCollection(
asList(new RecognizeEntitiesResult("0", new TextDocumentStatistics(44, 1), null,
new CategorizedEntityCollection(new IterableStream<>(getCategorizedEntitiesList1()), null)),
new RecognizeEntitiesResult("1", new TextDocumentStatistics(67, 1), null,
new CategorizedEntityCollection(new IterableStream<>(getCategorizedEntitiesForPiiInput()), null))
),
"2020-04-01",
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
/**
* RecognizePiiEntitiesResultCollection result for
* "I had a wonderful trip to Seattle last week."
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static RecognizePiiEntitiesResultCollection getRecognizePiiEntitiesResultCollection() {
final PiiEntity piiEntity0 = new PiiEntity();
PiiEntityPropertiesHelper.setText(piiEntity0, "last week");
PiiEntityPropertiesHelper.setCategory(piiEntity0, PiiEntityCategory.fromString("DateTime"));
PiiEntityPropertiesHelper.setSubcategory(piiEntity0, "DateRange");
PiiEntityPropertiesHelper.setOffset(piiEntity0, 34);
return new RecognizePiiEntitiesResultCollection(
asList(
new RecognizePiiEntitiesResult("0", new TextDocumentStatistics(44, 1), null,
new PiiEntityCollection(new IterableStream<>(Arrays.asList(piiEntity0)),
"I had a wonderful trip to Seattle *********.", null)),
new RecognizePiiEntitiesResult("1", new TextDocumentStatistics(67, 1), null,
new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList1()),
"********* ******** with ssn *********** is using our awesome API's.", null))),
"2020-07-01",
new TextDocumentBatchStatistics(2, 2, 0, 2)
);
}
/**
* ExtractKeyPhrasesResultCollection result for
* "I had a wonderful trip to Seattle last week."
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static ExtractKeyPhrasesResultCollection getExtractKeyPhrasesResultCollection() {
return new ExtractKeyPhrasesResultCollection(
asList(new ExtractKeyPhraseResult("0", new TextDocumentStatistics(44, 1),
null, new KeyPhrasesCollection(new IterableStream<>(asList("wonderful trip", "Seattle")), null)),
new ExtractKeyPhraseResult("1", new TextDocumentStatistics(67, 1),
null, new KeyPhrasesCollection(new IterableStream<>(asList("Microsoft employee", "ssn", "awesome API")), null))),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
static RecognizeLinkedEntitiesResultCollection getRecognizeLinkedEntitiesResultCollection() {
return new RecognizeLinkedEntitiesResultCollection(
asList(new RecognizeLinkedEntitiesResult("0", new TextDocumentStatistics(44, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList1()), null)),
new RecognizeLinkedEntitiesResult("1", new TextDocumentStatistics(20, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList2()), null))
),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
static RecognizeLinkedEntitiesResultCollection getRecognizeLinkedEntitiesResultCollectionForActions() {
return new RecognizeLinkedEntitiesResultCollection(
asList(new RecognizeLinkedEntitiesResult("0", new TextDocumentStatistics(44, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList1()), null)),
new RecognizeLinkedEntitiesResult("1", new TextDocumentStatistics(20, 1), null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList3()), null))
),
DEFAULT_MODEL_VERSION,
new TextDocumentBatchStatistics(2, 2, 0, 2));
}
static AnalyzeSentimentResultCollection getAnalyzeSentimentResultCollectionForActions() {
final AnalyzeSentimentResult analyzeSentimentResult1 = new AnalyzeSentimentResult("0",
null, null, getExpectedDocumentSentimentForActions());
final AnalyzeSentimentResult analyzeSentimentResult2 = new AnalyzeSentimentResult("1",
null, null, getExpectedDocumentSentimentForActions2());
return new AnalyzeSentimentResultCollection(
asList(analyzeSentimentResult1, analyzeSentimentResult2),
DEFAULT_MODEL_VERSION, new TextDocumentBatchStatistics(2, 2, 0, 2));
}
static RecognizeEntitiesActionResult getExpectedRecognizeEntitiesActionResult(boolean isError, String actionName,
OffsetDateTime completeAt, RecognizeEntitiesResultCollection resultCollection, TextAnalyticsError actionError) {
RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult();
RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection);
TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName);
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completeAt);
TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError);
TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError);
return actionResult;
}
static RecognizePiiEntitiesActionResult getExpectedRecognizePiiEntitiesActionResult(boolean isError,
String actionName, OffsetDateTime completedAt, RecognizePiiEntitiesResultCollection resultCollection,
TextAnalyticsError actionError) {
RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult();
RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection);
TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName);
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completedAt);
TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError);
TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError);
return actionResult;
}
static ExtractKeyPhrasesActionResult getExpectedExtractKeyPhrasesActionResult(boolean isError, String actionName,
OffsetDateTime completedAt, ExtractKeyPhrasesResultCollection resultCollection,
TextAnalyticsError actionError) {
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult();
ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection);
TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName);
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completedAt);
TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError);
TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError);
return actionResult;
}
static RecognizeLinkedEntitiesActionResult getExpectedRecognizeLinkedEntitiesActionResult(boolean isError,
String actionName, OffsetDateTime completeAt, RecognizeLinkedEntitiesResultCollection resultCollection,
TextAnalyticsError actionError) {
RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult();
RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection);
TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName);
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completeAt);
TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError);
TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError);
return actionResult;
}
static AnalyzeSentimentActionResult getExpectedAnalyzeSentimentActionResult(boolean isError, String actionName,
OffsetDateTime completeAt, AnalyzeSentimentResultCollection resultCollection, TextAnalyticsError actionError) {
AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult();
AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, resultCollection);
TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, actionName);
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, completeAt);
TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, isError);
TextAnalyticsActionResultPropertiesHelper.setError(actionResult, actionError);
return actionResult;
}
/**
* Helper method that gets the expected AnalyzeActionsResult result.
*/
static AnalyzeActionsResult getExpectedAnalyzeBatchActionsResult(
IterableStream<RecognizeEntitiesActionResult> recognizeEntitiesActionResults,
IterableStream<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults,
IterableStream<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults,
IterableStream<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults,
IterableStream<AnalyzeSentimentActionResult> analyzeSentimentActionResults) {
final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult();
AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult,
recognizeEntitiesActionResults);
AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult,
recognizePiiEntitiesActionResults);
AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult,
extractKeyPhrasesActionResults);
AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult,
recognizeLinkedEntitiesActionResults);
AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult,
analyzeSentimentActionResults);
return analyzeActionsResult;
}
/**
* CategorizedEntityCollection result for
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static RecognizeEntitiesResultCollection getRecognizeEntitiesResultCollectionForPagination(int startIndex,
int documentCount) {
List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>();
for (int i = startIndex; i < startIndex + documentCount; i++) {
recognizeEntitiesResults.add(new RecognizeEntitiesResult(Integer.toString(i), null, null,
new CategorizedEntityCollection(new IterableStream<>(getCategorizedEntitiesForPiiInput()), null)));
}
return new RecognizeEntitiesResultCollection(recognizeEntitiesResults, "2020-04-01",
new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount));
}
/**
* RecognizePiiEntitiesResultCollection result for
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static RecognizePiiEntitiesResultCollection getRecognizePiiEntitiesResultCollectionForPagination(int startIndex,
int documentCount) {
List<RecognizePiiEntitiesResult> recognizePiiEntitiesResults = new ArrayList<>();
for (int i = startIndex; i < startIndex + documentCount; i++) {
recognizePiiEntitiesResults.add(new RecognizePiiEntitiesResult(Integer.toString(i), null, null,
new PiiEntityCollection(new IterableStream<>(getPiiEntitiesList1()),
"********* ******** with ssn *********** is using our awesome API's.", null)));
}
return new RecognizePiiEntitiesResultCollection(recognizePiiEntitiesResults, "2020-07-01",
new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount)
);
}
/**
* ExtractKeyPhrasesResultCollection result for
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static ExtractKeyPhrasesResultCollection getExtractKeyPhrasesResultCollectionForPagination(int startIndex,
int documentCount) {
List<ExtractKeyPhraseResult> extractKeyPhraseResults = new ArrayList<>();
for (int i = startIndex; i < startIndex + documentCount; i++) {
extractKeyPhraseResults.add(new ExtractKeyPhraseResult(Integer.toString(i), null, null,
new KeyPhrasesCollection(new IterableStream<>(asList("Microsoft employee", "ssn", "awesome API")),
null)));
}
return new ExtractKeyPhrasesResultCollection(extractKeyPhraseResults, "2020-07-01",
new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount));
}
/**
* RecognizeLinkedEntitiesResultCollection result for
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static RecognizeLinkedEntitiesResultCollection getRecognizeLinkedEntitiesResultCollectionForPagination(
int startIndex, int documentCount) {
List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults = new ArrayList<>();
for (int i = startIndex; i < startIndex + documentCount; i++) {
recognizeLinkedEntitiesResults.add(new RecognizeLinkedEntitiesResult(Integer.toString(i), null, null,
new LinkedEntityCollection(new IterableStream<>(getLinkedEntitiesList3()), null)));
}
return new RecognizeLinkedEntitiesResultCollection(recognizeLinkedEntitiesResults, "",
new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount)
);
}
/**
* AnalyzeSentimentResultCollection result for
* "Microsoft employee with ssn 859-98-0987 is using our awesome API's."
*/
static AnalyzeSentimentResultCollection getAnalyzeSentimentResultCollectionForPagination(
int startIndex, int documentCount) {
List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>();
for (int i = startIndex; i < startIndex + documentCount; i++) {
analyzeSentimentResults.add(new AnalyzeSentimentResult(Integer.toString(i), null, null,
getExpectedDocumentSentimentForActions2()));
}
return new AnalyzeSentimentResultCollection(analyzeSentimentResults, "",
new TextDocumentBatchStatistics(documentCount, documentCount, 0, documentCount)
);
}
/**
* Helper method that gets a multi-page (AnalyzeActionsResult) list.
*/
static List<AnalyzeActionsResult> getExpectedAnalyzeActionsResultListForMultiplePages(int startIndex,
int firstPage, int secondPage) {
List<AnalyzeActionsResult> analyzeActionsResults = new ArrayList<>();
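// First page: each action result covers the first 'firstPage' documents starting at 'startIndex'.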
analyzeActionsResults.add(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(
false, null, TIME_NOW, getRecognizeEntitiesResultCollectionForPagination(startIndex, firstPage), null))),
IterableStream.of(asList(getExpectedRecognizeLinkedEntitiesActionResult(
false, null, TIME_NOW, getRecognizeLinkedEntitiesResultCollectionForPagination(startIndex, firstPage), null))),
IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(
false, null, TIME_NOW, getRecognizePiiEntitiesResultCollectionForPagination(startIndex, firstPage), null))),
IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(
false, null, TIME_NOW, getExtractKeyPhrasesResultCollectionForPagination(startIndex, firstPage), null))),
IterableStream.of(asList(getExpectedAnalyzeSentimentActionResult(
false, null, TIME_NOW, getAnalyzeSentimentResultCollectionForPagination(startIndex, firstPage), null)))
));
startIndex += firstPage;
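// Second page: the remaining 'secondPage' documents, with ids continuing after the first page.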
analyzeActionsResults.add(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(
false, null, TIME_NOW, getRecognizeEntitiesResultCollectionForPagination(startIndex, secondPage), null))),
IterableStream.of(asList(getExpectedRecognizeLinkedEntitiesActionResult(
false, null, TIME_NOW, getRecognizeLinkedEntitiesResultCollectionForPagination(startIndex, secondPage), null))),
IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(
false, null, TIME_NOW, getRecognizePiiEntitiesResultCollectionForPagination(startIndex, secondPage), null))),
IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(
false, null, TIME_NOW, getExtractKeyPhrasesResultCollectionForPagination(startIndex, secondPage), null))),
IterableStream.of(asList(getExpectedAnalyzeSentimentActionResult(
false, null, TIME_NOW, getAnalyzeSentimentResultCollectionForPagination(startIndex, secondPage), null)))
));
return analyzeActionsResults;
}
/**
* Helper method that gets a customized TextAnalyticsError.
*/
static TextAnalyticsError getActionError(TextAnalyticsErrorCode errorCode, String taskName, String index) {
return new TextAnalyticsError(errorCode, "", "
}
/**
* Returns a stream of arguments that includes all combinations of eligible {@link HttpClient HttpClients} and
* service versions that should be tested.
*
* @return A stream of HttpClient and service version combinations to test.
*/
static Stream<Arguments> getTestParameters() {
List<Arguments> argumentsList = new ArrayList<>();
getHttpClients()
.forEach(httpClient -> {
Arrays.stream(TextAnalyticsServiceVersion.values()).filter(
TestUtils::shouldServiceVersionBeTested)
.forEach(serviceVersion -> argumentsList.add(Arguments.of(httpClient, serviceVersion)));
});
return argumentsList.stream();
}
/**
* Returns whether the given service version matches the rules of the test framework.
*
* <ul>
* <li>Using latest service version as default if no environment variable is set.</li>
* <li>If it's set to ALL, all Service versions in {@link TextAnalyticsServiceVersion} will be tested.</li>
* <li>Otherwise, Service version string should match env variable.</li>
* </ul>
*
* Environment values currently supported are: "ALL", "${version}".
* Use a comma to separate the service versions you want to test.
* e.g. {@code set AZURE_TEST_SERVICE_VERSIONS = V1_0, V2_0}
*
* @param serviceVersion The service version to check.
* @return Whether the given service version should be tested.
*/
private static boolean shouldServiceVersionBeTested(TextAnalyticsServiceVersion serviceVersion) {
String serviceVersionFromEnv =
Configuration.getGlobalConfiguration().get(AZURE_TEXT_ANALYTICS_TEST_SERVICE_VERSIONS);
if (CoreUtils.isNullOrEmpty(serviceVersionFromEnv)) {
return TextAnalyticsServiceVersion.getLatest().equals(serviceVersion);
}
if (AZURE_TEST_SERVICE_VERSIONS_VALUE_ALL.equalsIgnoreCase(serviceVersionFromEnv)) {
return true;
}
String[] configuredServiceVersionList = serviceVersionFromEnv.split(",");
return Arrays.stream(configuredServiceVersionList).anyMatch(configuredServiceVersion ->
serviceVersion.getVersion().equals(configuredServiceVersion.trim()));
}
private TestUtils() {
}
} |
You are modifying code that you introduced in the same PR (which increases the PR size). It would be better to reverse the order of the commits. | private void finishBroadcastBufferBuilder() {
if (broadcastBufferBuilder != null) {
numBytesOut.inc(broadcastBufferBuilder.finish() * numSubpartitions);
numBuffersOut.inc(numSubpartitions);
broadcastBufferBuilder.close();
broadcastBufferBuilder = null;
}
} | broadcastBufferBuilder.close(); | private void finishBroadcastBufferBuilder() {
if (broadcastBufferBuilder != null) {
numBytesOut.inc(broadcastBufferBuilder.finish() * numSubpartitions);
numBuffersOut.inc(numSubpartitions);
broadcastBufferBuilder.close();
broadcastBufferBuilder = null;
}
} | class BufferWritingResultPartition extends ResultPartition {
/** The subpartitions of this partition. At least one. */
protected final ResultSubpartition[] subpartitions;
/**
* For non-broadcast mode, each subpartition maintains a separate BufferBuilder which might be
* null.
*/
private final BufferBuilder[] unicastBufferBuilders;
/** For broadcast mode, a single BufferBuilder is shared by all subpartitions. */
private BufferBuilder broadcastBufferBuilder;
private TimerGauge backPressuredTimeMsPerSecond = new TimerGauge();
public BufferWritingResultPartition(
String owningTaskName,
int partitionIndex,
ResultPartitionID partitionId,
ResultPartitionType partitionType,
ResultSubpartition[] subpartitions,
int numTargetKeyGroups,
ResultPartitionManager partitionManager,
@Nullable BufferCompressor bufferCompressor,
SupplierWithException<BufferPool, IOException> bufferPoolFactory) {
super(
owningTaskName,
partitionIndex,
partitionId,
partitionType,
subpartitions.length,
numTargetKeyGroups,
partitionManager,
bufferCompressor,
bufferPoolFactory);
this.subpartitions = checkNotNull(subpartitions);
this.unicastBufferBuilders = new BufferBuilder[subpartitions.length];
}
@Override
public void setup() throws IOException {
super.setup();
checkState(
bufferPool.getNumberOfRequiredMemorySegments() >= getNumberOfSubpartitions(),
"Bug in result partition setup logic: Buffer pool has not enough guaranteed buffers for"
+ " this result partition.");
}
@Override
public int getNumberOfQueuedBuffers() {
int totalBuffers = 0;
for (ResultSubpartition subpartition : subpartitions) {
totalBuffers += subpartition.unsynchronizedGetNumberOfQueuedBuffers();
}
return totalBuffers;
}
@Override
public int getNumberOfQueuedBuffers(int targetSubpartition) {
checkArgument(targetSubpartition >= 0 && targetSubpartition < numSubpartitions);
return subpartitions[targetSubpartition].unsynchronizedGetNumberOfQueuedBuffers();
}
protected void flushSubpartition(int targetSubpartition, boolean finishProducers) {
if (finishProducers) {
finishBroadcastBufferBuilder();
finishUnicastBufferBuilder(targetSubpartition);
}
subpartitions[targetSubpartition].flush();
}
protected void flushAllSubpartitions(boolean finishProducers) {
if (finishProducers) {
finishBroadcastBufferBuilder();
finishUnicastBufferBuilders();
}
for (ResultSubpartition subpartition : subpartitions) {
subpartition.flush();
}
}
@Override
public void emitRecord(ByteBuffer record, int targetSubpartition) throws IOException {
BufferBuilder buffer = appendUnicastDataForNewRecord(record, targetSubpartition);
while (record.hasRemaining()) {
finishUnicastBufferBuilder(targetSubpartition);
buffer = appendUnicastDataForRecordContinuation(record, targetSubpartition);
}
if (buffer.isFull()) {
finishUnicastBufferBuilder(targetSubpartition);
}
}
@Override
public void broadcastRecord(ByteBuffer record) throws IOException {
BufferBuilder buffer = appendBroadcastDataForNewRecord(record);
while (record.hasRemaining()) {
finishBroadcastBufferBuilder();
buffer = appendBroadcastDataForRecordContinuation(record);
}
if (buffer.isFull()) {
finishBroadcastBufferBuilder();
}
}
@Override
public void broadcastEvent(AbstractEvent event, boolean isPriorityEvent) throws IOException {
checkInProduceState();
finishBroadcastBufferBuilder();
finishUnicastBufferBuilders();
try (BufferConsumer eventBufferConsumer =
EventSerializer.toBufferConsumer(event, isPriorityEvent)) {
for (ResultSubpartition subpartition : subpartitions) {
subpartition.add(eventBufferConsumer.copy(), 0);
}
}
}
@Override
public void setMetricGroup(TaskIOMetricGroup metrics) {
super.setMetricGroup(metrics);
backPressuredTimeMsPerSecond = metrics.getBackPressuredTimePerSecond();
}
@Override
public ResultSubpartitionView createSubpartitionView(
int subpartitionIndex, BufferAvailabilityListener availabilityListener)
throws IOException {
checkElementIndex(subpartitionIndex, numSubpartitions, "Subpartition not found.");
checkState(!isReleased(), "Partition released.");
ResultSubpartition subpartition = subpartitions[subpartitionIndex];
ResultSubpartitionView readView = subpartition.createReadView(availabilityListener);
LOG.debug("Created {}", readView);
return readView;
}
@Override
public void finish() throws IOException {
finishBroadcastBufferBuilder();
finishUnicastBufferBuilders();
for (ResultSubpartition subpartition : subpartitions) {
subpartition.finish();
}
super.finish();
}
@Override
protected void releaseInternal() {
for (ResultSubpartition subpartition : subpartitions) {
try {
subpartition.release();
}
catch (Throwable t) {
LOG.error("Error during release of result subpartition: " + t.getMessage(), t);
}
}
}
private BufferBuilder appendUnicastDataForNewRecord(
final ByteBuffer record, final int targetSubpartition) throws IOException {
if (targetSubpartition < 0 || targetSubpartition > unicastBufferBuilders.length) {
throw new ArrayIndexOutOfBoundsException(targetSubpartition);
}
BufferBuilder buffer = unicastBufferBuilders[targetSubpartition];
if (buffer == null) {
buffer = requestNewUnicastBufferBuilder(targetSubpartition);
subpartitions[targetSubpartition].add(buffer.createBufferConsumerFromBeginning(), 0);
}
buffer.appendAndCommit(record);
return buffer;
}
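// Called when a record spans buffers: a fresh buffer is requested for the target subpartition,
// the remaining bytes are appended, and the number of bytes belonging to the continued record
// is handed to the subpartition together with the new buffer consumer.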
private BufferBuilder appendUnicastDataForRecordContinuation(
final ByteBuffer remainingRecordBytes, final int targetSubpartition)
throws IOException {
final BufferBuilder buffer = requestNewUnicastBufferBuilder(targetSubpartition);
final int partialRecordBytes = buffer.appendAndCommit(remainingRecordBytes);
subpartitions[targetSubpartition].add(
buffer.createBufferConsumerFromBeginning(), partialRecordBytes);
return buffer;
}
private BufferBuilder appendBroadcastDataForNewRecord(final ByteBuffer record)
throws IOException {
BufferBuilder buffer = broadcastBufferBuilder;
if (buffer == null) {
buffer = requestNewBroadcastBufferBuilder();
createBroadcastBufferConsumers(buffer, 0);
}
buffer.appendAndCommit(record);
return buffer;
}
private BufferBuilder appendBroadcastDataForRecordContinuation(
final ByteBuffer remainingRecordBytes) throws IOException {
final BufferBuilder buffer = requestNewBroadcastBufferBuilder();
final int partialRecordBytes = buffer.appendAndCommit(remainingRecordBytes);
createBroadcastBufferConsumers(buffer, partialRecordBytes);
return buffer;
}
private void createBroadcastBufferConsumers(BufferBuilder buffer, int partialRecordBytes)
throws IOException {
try (final BufferConsumer consumer = buffer.createBufferConsumerFromBeginning()) {
for (ResultSubpartition subpartition : subpartitions) {
subpartition.add(consumer.copy(), partialRecordBytes);
}
}
}
private BufferBuilder requestNewUnicastBufferBuilder(int targetSubpartition)
throws IOException {
checkInProduceState();
ensureUnicastMode();
final BufferBuilder bufferBuilder = requestNewBufferBuilderFromPool(targetSubpartition);
unicastBufferBuilders[targetSubpartition] = bufferBuilder;
return bufferBuilder;
}
private BufferBuilder requestNewBroadcastBufferBuilder() throws IOException {
checkInProduceState();
ensureBroadcastMode();
final BufferBuilder bufferBuilder = requestNewBufferBuilderFromPool(0);
broadcastBufferBuilder = bufferBuilder;
return bufferBuilder;
}
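// Tries to obtain a buffer builder without blocking; if the pool is empty, blocks until a buffer
// becomes available and accounts the waiting time as back pressure.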
private BufferBuilder requestNewBufferBuilderFromPool(int targetSubpartition)
throws IOException {
BufferBuilder bufferBuilder = bufferPool.requestBufferBuilder(targetSubpartition);
if (bufferBuilder != null) {
return bufferBuilder;
}
backPressuredTimeMsPerSecond.markStart();
try {
bufferBuilder = bufferPool.requestBufferBuilderBlocking(targetSubpartition);
backPressuredTimeMsPerSecond.markEnd();
return bufferBuilder;
} catch (InterruptedException e) {
throw new IOException("Interrupted while waiting for buffer");
}
}
private void finishUnicastBufferBuilder(int targetSubpartition) {
final BufferBuilder bufferBuilder = unicastBufferBuilders[targetSubpartition];
if (bufferBuilder != null) {
numBytesOut.inc(bufferBuilder.finish());
numBuffersOut.inc();
unicastBufferBuilders[targetSubpartition] = null;
bufferBuilder.close();
}
}
private void finishUnicastBufferBuilders() {
for (int channelIndex = 0; channelIndex < numSubpartitions; channelIndex++) {
finishUnicastBufferBuilder(channelIndex);
}
}
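// Before writing in one mode, open buffer builders of the other mode are finished first, so that
// data already written in the other mode is completed before the new data is appended.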
private void ensureUnicastMode() {
finishBroadcastBufferBuilder();
}
private void ensureBroadcastMode() {
finishUnicastBufferBuilders();
}
@VisibleForTesting
public TimerGauge getBackPressuredTimeMsPerSecond() {
return backPressuredTimeMsPerSecond;
}
@VisibleForTesting
public ResultSubpartition[] getAllPartitions() {
return subpartitions;
}
} | class BufferWritingResultPartition extends ResultPartition {
/** The subpartitions of this partition. At least one. */
protected final ResultSubpartition[] subpartitions;
/**
* For non-broadcast mode, each subpartition maintains a separate BufferBuilder which might be
* null.
*/
private final BufferBuilder[] unicastBufferBuilders;
/** For broadcast mode, a single BufferBuilder is shared by all subpartitions. */
private BufferBuilder broadcastBufferBuilder;
private TimerGauge backPressuredTimeMsPerSecond = new TimerGauge();
public BufferWritingResultPartition(
String owningTaskName,
int partitionIndex,
ResultPartitionID partitionId,
ResultPartitionType partitionType,
ResultSubpartition[] subpartitions,
int numTargetKeyGroups,
ResultPartitionManager partitionManager,
@Nullable BufferCompressor bufferCompressor,
SupplierWithException<BufferPool, IOException> bufferPoolFactory) {
super(
owningTaskName,
partitionIndex,
partitionId,
partitionType,
subpartitions.length,
numTargetKeyGroups,
partitionManager,
bufferCompressor,
bufferPoolFactory);
this.subpartitions = checkNotNull(subpartitions);
this.unicastBufferBuilders = new BufferBuilder[subpartitions.length];
}
@Override
public void setup() throws IOException {
super.setup();
checkState(
bufferPool.getNumberOfRequiredMemorySegments() >= getNumberOfSubpartitions(),
"Bug in result partition setup logic: Buffer pool has not enough guaranteed buffers for"
+ " this result partition.");
}
@Override
public int getNumberOfQueuedBuffers() {
int totalBuffers = 0;
for (ResultSubpartition subpartition : subpartitions) {
totalBuffers += subpartition.unsynchronizedGetNumberOfQueuedBuffers();
}
return totalBuffers;
}
@Override
public int getNumberOfQueuedBuffers(int targetSubpartition) {
checkArgument(targetSubpartition >= 0 && targetSubpartition < numSubpartitions);
return subpartitions[targetSubpartition].unsynchronizedGetNumberOfQueuedBuffers();
}
protected void flushSubpartition(int targetSubpartition, boolean finishProducers) {
if (finishProducers) {
finishBroadcastBufferBuilder();
finishUnicastBufferBuilder(targetSubpartition);
}
subpartitions[targetSubpartition].flush();
}
protected void flushAllSubpartitions(boolean finishProducers) {
if (finishProducers) {
finishBroadcastBufferBuilder();
finishUnicastBufferBuilders();
}
for (ResultSubpartition subpartition : subpartitions) {
subpartition.flush();
}
}
@Override
public void emitRecord(ByteBuffer record, int targetSubpartition) throws IOException {
BufferBuilder buffer = appendUnicastDataForNewRecord(record, targetSubpartition);
while (record.hasRemaining()) {
finishUnicastBufferBuilder(targetSubpartition);
buffer = appendUnicastDataForRecordContinuation(record, targetSubpartition);
}
if (buffer.isFull()) {
finishUnicastBufferBuilder(targetSubpartition);
}
}
@Override
public void broadcastRecord(ByteBuffer record) throws IOException {
BufferBuilder buffer = appendBroadcastDataForNewRecord(record);
while (record.hasRemaining()) {
finishBroadcastBufferBuilder();
buffer = appendBroadcastDataForRecordContinuation(record);
}
if (buffer.isFull()) {
finishBroadcastBufferBuilder();
}
}
@Override
public void broadcastEvent(AbstractEvent event, boolean isPriorityEvent) throws IOException {
checkInProduceState();
finishBroadcastBufferBuilder();
finishUnicastBufferBuilders();
try (BufferConsumer eventBufferConsumer =
EventSerializer.toBufferConsumer(event, isPriorityEvent)) {
for (ResultSubpartition subpartition : subpartitions) {
subpartition.add(eventBufferConsumer.copy(), 0);
}
}
}
@Override
public void setMetricGroup(TaskIOMetricGroup metrics) {
super.setMetricGroup(metrics);
backPressuredTimeMsPerSecond = metrics.getBackPressuredTimePerSecond();
}
@Override
public ResultSubpartitionView createSubpartitionView(
int subpartitionIndex, BufferAvailabilityListener availabilityListener)
throws IOException {
checkElementIndex(subpartitionIndex, numSubpartitions, "Subpartition not found.");
checkState(!isReleased(), "Partition released.");
ResultSubpartition subpartition = subpartitions[subpartitionIndex];
ResultSubpartitionView readView = subpartition.createReadView(availabilityListener);
LOG.debug("Created {}", readView);
return readView;
}
@Override
public void finish() throws IOException {
finishBroadcastBufferBuilder();
finishUnicastBufferBuilders();
for (ResultSubpartition subpartition : subpartitions) {
subpartition.finish();
}
super.finish();
}
@Override
protected void releaseInternal() {
for (ResultSubpartition subpartition : subpartitions) {
try {
subpartition.release();
}
catch (Throwable t) {
LOG.error("Error during release of result subpartition: " + t.getMessage(), t);
}
}
}
private BufferBuilder appendUnicastDataForNewRecord(
final ByteBuffer record, final int targetSubpartition) throws IOException {
if (targetSubpartition < 0 || targetSubpartition > unicastBufferBuilders.length) {
throw new ArrayIndexOutOfBoundsException(targetSubpartition);
}
BufferBuilder buffer = unicastBufferBuilders[targetSubpartition];
if (buffer == null) {
buffer = requestNewUnicastBufferBuilder(targetSubpartition);
subpartitions[targetSubpartition].add(buffer.createBufferConsumerFromBeginning(), 0);
}
buffer.appendAndCommit(record);
return buffer;
}
private BufferBuilder appendUnicastDataForRecordContinuation(
final ByteBuffer remainingRecordBytes, final int targetSubpartition)
throws IOException {
final BufferBuilder buffer = requestNewUnicastBufferBuilder(targetSubpartition);
final int partialRecordBytes = buffer.appendAndCommit(remainingRecordBytes);
subpartitions[targetSubpartition].add(
buffer.createBufferConsumerFromBeginning(), partialRecordBytes);
return buffer;
}
private BufferBuilder appendBroadcastDataForNewRecord(final ByteBuffer record)
throws IOException {
BufferBuilder buffer = broadcastBufferBuilder;
if (buffer == null) {
buffer = requestNewBroadcastBufferBuilder();
createBroadcastBufferConsumers(buffer, 0);
}
buffer.appendAndCommit(record);
return buffer;
}
private BufferBuilder appendBroadcastDataForRecordContinuation(
final ByteBuffer remainingRecordBytes) throws IOException {
final BufferBuilder buffer = requestNewBroadcastBufferBuilder();
final int partialRecordBytes = buffer.appendAndCommit(remainingRecordBytes);
createBroadcastBufferConsumers(buffer, partialRecordBytes);
return buffer;
}
private void createBroadcastBufferConsumers(BufferBuilder buffer, int partialRecordBytes)
throws IOException {
try (final BufferConsumer consumer = buffer.createBufferConsumerFromBeginning()) {
for (ResultSubpartition subpartition : subpartitions) {
subpartition.add(consumer.copy(), partialRecordBytes);
}
}
}
private BufferBuilder requestNewUnicastBufferBuilder(int targetSubpartition)
throws IOException {
checkInProduceState();
ensureUnicastMode();
final BufferBuilder bufferBuilder = requestNewBufferBuilderFromPool(targetSubpartition);
unicastBufferBuilders[targetSubpartition] = bufferBuilder;
return bufferBuilder;
}
private BufferBuilder requestNewBroadcastBufferBuilder() throws IOException {
checkInProduceState();
ensureBroadcastMode();
final BufferBuilder bufferBuilder = requestNewBufferBuilderFromPool(0);
broadcastBufferBuilder = bufferBuilder;
return bufferBuilder;
}
private BufferBuilder requestNewBufferBuilderFromPool(int targetSubpartition)
throws IOException {
BufferBuilder bufferBuilder = bufferPool.requestBufferBuilder(targetSubpartition);
if (bufferBuilder != null) {
return bufferBuilder;
}
backPressuredTimeMsPerSecond.markStart();
try {
bufferBuilder = bufferPool.requestBufferBuilderBlocking(targetSubpartition);
backPressuredTimeMsPerSecond.markEnd();
return bufferBuilder;
} catch (InterruptedException e) {
throw new IOException("Interrupted while waiting for buffer");
}
}
private void finishUnicastBufferBuilder(int targetSubpartition) {
final BufferBuilder bufferBuilder = unicastBufferBuilders[targetSubpartition];
if (bufferBuilder != null) {
numBytesOut.inc(bufferBuilder.finish());
numBuffersOut.inc();
unicastBufferBuilders[targetSubpartition] = null;
bufferBuilder.close();
}
}
private void finishUnicastBufferBuilders() {
for (int channelIndex = 0; channelIndex < numSubpartitions; channelIndex++) {
finishUnicastBufferBuilder(channelIndex);
}
}
private void ensureUnicastMode() {
finishBroadcastBufferBuilder();
}
private void ensureBroadcastMode() {
finishUnicastBufferBuilders();
}
@VisibleForTesting
public TimerGauge getBackPressuredTimeMsPerSecond() {
return backPressuredTimeMsPerSecond;
}
@VisibleForTesting
public ResultSubpartition[] getAllPartitions() {
return subpartitions;
}
} |
Hmm, it is nullable, but veneer will have its default? | public Read withAttemptTimeout(Duration timeout) {
checkArgument(timeout.isLongerThan(Duration.ZERO), "attempt timeout must be positive");
BigtableReadOptions readOptions = getBigtableReadOptions();
return toBuilder()
.setBigtableReadOptions(readOptions.toBuilder().setAttemptTimeout(timeout).build())
.build();
} | checkArgument(timeout.isLongerThan(Duration.ZERO), "attempt timeout must be positive"); | public Read withAttemptTimeout(Duration timeout) {
checkArgument(timeout.isLongerThan(Duration.ZERO), "attempt timeout must be positive");
BigtableReadOptions readOptions = getBigtableReadOptions();
return toBuilder()
.setBigtableReadOptions(readOptions.toBuilder().setAttemptTimeout(timeout).build())
.build();
} | class to using the SegmentReader. If
* null is passed, this behavior will be disabled and the stream reader will be used.
*
* <p>Does not modify this object.
*
* <p>When we have a builder, we initialize the value. When they call the method then we
* override the value
*/
@Experimental(Kind.SOURCE_SINK)
public Read withMaxBufferElementCount(@Nullable Integer maxBufferElementCount) {
BigtableReadOptions bigtableReadOptions = getBigtableReadOptions();
return toBuilder()
.setBigtableReadOptions(
bigtableReadOptions
.toBuilder()
.setMaxBufferElementCount(maxBufferElementCount)
.build())
.build();
} | class to using the SegmentReader. If
* null is passed, this behavior will be disabled and the stream reader will be used.
*
* <p>Does not modify this object.
*
* <p>When we have a builder, we initialize the value. When they call the method then we
* override the value
*/
@Experimental(Kind.SOURCE_SINK)
public Read withMaxBufferElementCount(@Nullable Integer maxBufferElementCount) {
BigtableReadOptions bigtableReadOptions = getBigtableReadOptions();
return toBuilder()
.setBigtableReadOptions(
bigtableReadOptions
.toBuilder()
.setMaxBufferElementCount(maxBufferElementCount)
.build())
.build();
} |
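The entry above introduces a per-attempt timeout setter on the Bigtable read builder and requires it to be positive. As a rough usage sketch (not taken from this entry; the project, instance, and table setters and their values are assumed placeholders for illustration only):

```java
// Hedged usage sketch: chaining the attempt timeout onto a Bigtable read.
// The withProjectId/withInstanceId/withTableId calls and their values are
// assumptions for illustration; only withAttemptTimeout comes from the entry above.
import org.apache.beam.sdk.io.gcp.bigtable.BigtableIO;
import org.joda.time.Duration;

public class AttemptTimeoutExample {
    public static BigtableIO.Read configureRead() {
        return BigtableIO.read()
                .withProjectId("example-project")
                .withInstanceId("example-instance")
                .withTableId("example-table")
                // Must be positive, per the checkArgument in the setter above.
                .withAttemptTimeout(Duration.millis(500));
    }
}
```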
Any reason why response tag is modifiable but failure tag is final? | private Result(TupleTag<KV<RequestT, ResponseT>> responseTag, PCollectionTuple pct) {
this.pipeline = pct.getPipeline();
this.responseTag = responseTag;
this.responses = pct.get(responseTag);
this.failures = pct.get(FAILURE_TAG);
} | this.failures = pct.get(FAILURE_TAG); | private Result(TupleTag<KV<RequestT, ResponseT>> responseTag, PCollectionTuple pct) {
this.pipeline = pct.getPipeline();
this.responseTag = responseTag;
this.responses = pct.get(responseTag);
this.failures = pct.get(FAILURE_TAG);
} | class Result<RequestT, ResponseT> implements POutput {
static <RequestT, ResponseT> Result<RequestT, ResponseT> of(
TupleTag<KV<RequestT, ResponseT>> responseTag, PCollectionTuple pct) {
return new Result<>(responseTag, pct);
}
private final Pipeline pipeline;
private final TupleTag<KV<RequestT, ResponseT>> responseTag;
private final PCollection<KV<RequestT, ResponseT>> responses;
private final PCollection<ApiIOError> failures;
PCollection<KV<RequestT, ResponseT>> getResponses() {
return responses;
}
PCollection<ApiIOError> getFailures() {
return failures;
}
@Override
public Pipeline getPipeline() {
return this.pipeline;
}
@Override
public Map<TupleTag<?>, PValue> expand() {
return ImmutableMap.of(
responseTag, responses,
FAILURE_TAG, failures);
}
@Override
public void finishSpecifyingOutput(
String transformName, PInput input, PTransform<?, ?> transform) {}
} | class Result<RequestT, ResponseT> implements POutput {
static <RequestT, ResponseT> Result<RequestT, ResponseT> of(
TupleTag<KV<RequestT, ResponseT>> responseTag, PCollectionTuple pct) {
return new Result<>(responseTag, pct);
}
private final Pipeline pipeline;
private final TupleTag<KV<RequestT, ResponseT>> responseTag;
private final PCollection<KV<RequestT, ResponseT>> responses;
private final PCollection<ApiIOError> failures;
PCollection<KV<RequestT, ResponseT>> getResponses() {
return responses;
}
PCollection<ApiIOError> getFailures() {
return failures;
}
@Override
public Pipeline getPipeline() {
return this.pipeline;
}
@Override
public Map<TupleTag<?>, PValue> expand() {
return ImmutableMap.of(
responseTag, responses,
FAILURE_TAG, failures);
}
@Override
public void finishSpecifyingOutput(
String transformName, PInput input, PTransform<?, ?> transform) {}
} |
A test case should be added to cover the new behavior (see the sketch after this entry). | private List<Column> getFileSchema() throws DdlException {
if (fileStatuses.isEmpty()) {
return Lists.newArrayList();
}
TNetworkAddress address;
List<Long> nodeIds = GlobalStateMgr.getCurrentSystemInfo().getBackendIds(true);
if (nodeIds.isEmpty()) {
if (RunMode.getCurrentRunMode() == RunMode.SHARED_NOTHING) {
throw new DdlException("Failed to send proxy request. No alive backends");
} else {
nodeIds.addAll(GlobalStateMgr.getCurrentSystemInfo().getComputeNodeIds(true));
if (nodeIds.isEmpty()) {
throw new DdlException("Failed to send proxy request. " +
"No alive backends or compute nodes");
}
}
}
Collections.shuffle(nodeIds);
ComputeNode node = GlobalStateMgr.getCurrentSystemInfo().getBackendOrComputeNode(nodeIds.get(0));
address = new TNetworkAddress(node.getHost(), node.getBrpcPort());
PGetFileSchemaResult result;
try {
PGetFileSchemaRequest request = getGetFileSchemaRequest(fileStatuses);
Future<PGetFileSchemaResult> future = BackendServiceClient.getInstance().getFileSchema(address, request);
result = future.get();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new DdlException("failed to get file schema", e);
} catch (Exception e) {
throw new DdlException("failed to get file schema", e);
}
if (TStatusCode.findByValue(result.status.statusCode) != TStatusCode.OK) {
throw new DdlException("failed to get file schema, path: " + path + ", error: " + result.status.errorMsgs);
}
List<Column> columns = new ArrayList<>();
for (PSlotDescriptor slot : result.schema) {
columns.add(new Column(slot.colName, Type.fromProtobuf(slot.slotType), true));
}
return columns;
} | address = new TNetworkAddress(node.getHost(), node.getBrpcPort()); | private List<Column> getFileSchema() throws DdlException {
if (fileStatuses.isEmpty()) {
return Lists.newArrayList();
}
TNetworkAddress address;
List<Long> nodeIds = GlobalStateMgr.getCurrentSystemInfo().getBackendIds(true);
if (RunMode.getCurrentRunMode() == RunMode.SHARED_DATA) {
nodeIds.addAll(GlobalStateMgr.getCurrentSystemInfo().getComputeNodeIds(true));
}
if (nodeIds.isEmpty()) {
if (RunMode.getCurrentRunMode() == RunMode.SHARED_NOTHING) {
throw new DdlException("Failed to send proxy request. No alive backends");
} else {
throw new DdlException("Failed to send proxy request. No alive backends or compute nodes");
}
}
Collections.shuffle(nodeIds);
ComputeNode node = GlobalStateMgr.getCurrentSystemInfo().getBackendOrComputeNode(nodeIds.get(0));
address = new TNetworkAddress(node.getHost(), node.getBrpcPort());
PGetFileSchemaResult result;
try {
PGetFileSchemaRequest request = getGetFileSchemaRequest(fileStatuses);
Future<PGetFileSchemaResult> future = BackendServiceClient.getInstance().getFileSchema(address, request);
result = future.get();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new DdlException("failed to get file schema", e);
} catch (Exception e) {
throw new DdlException("failed to get file schema", e);
}
if (TStatusCode.findByValue(result.status.statusCode) != TStatusCode.OK) {
throw new DdlException("failed to get file schema, path: " + path + ", error: " + result.status.errorMsgs);
}
List<Column> columns = new ArrayList<>();
for (PSlotDescriptor slot : result.schema) {
columns.add(new Column(slot.colName, Type.fromProtobuf(slot.slotType), true));
}
return columns;
} | class TableFunctionTable extends Table {
private static final Logger LOG = LogManager.getLogger(TableFunctionTable.class);
public static final String FAKE_PATH = "fake:
public static final String PROPERTY_PATH = "path";
public static final String PROPERTY_FORMAT = "format";
public static final String PROPERTY_COLUMNS_FROM_PATH = "columns_from_path";
private String path;
private String format;
private String compressionType;
private List<String> columnsFromPath = new ArrayList<>();
private final Map<String, String> properties;
@Nullable
private List<Integer> partitionColumnIDs;
private boolean writeSingleFile;
private List<TBrokerFileStatus> fileStatuses = Lists.newArrayList();
public TableFunctionTable(Map<String, String> properties) throws DdlException {
super(TableType.TABLE_FUNCTION);
super.setId(-1);
super.setName("table_function_table");
this.properties = properties;
parseProperties();
parseFiles();
List<Column> columns = new ArrayList<>();
if (path.startsWith(FAKE_PATH)) {
columns.add(new Column("col_int", Type.INT));
columns.add(new Column("col_string", Type.VARCHAR));
} else {
columns = getFileSchema();
}
columns.addAll(getSchemaFromPath());
setNewFullSchema(columns);
}
public TableFunctionTable(String path, String format, String compressionType, List<Column> columns,
@Nullable List<Integer> partitionColumnIDs, boolean writeSingleFile,
Map<String, String> properties) {
super(TableType.TABLE_FUNCTION);
verify(!Strings.isNullOrEmpty(path), "path is null or empty");
verify(!(partitionColumnIDs != null && writeSingleFile));
this.path = path;
this.format = format;
this.compressionType = compressionType;
this.partitionColumnIDs = partitionColumnIDs;
this.writeSingleFile = writeSingleFile;
this.properties = properties;
super.setNewFullSchema(columns);
}
@Override
public boolean supportInsert() {
return true;
}
public List<TBrokerFileStatus> fileList() {
return fileStatuses;
}
@Override
public Map<String, String> getProperties() {
return properties;
}
@Override
public TTableDescriptor toThrift(List<DescriptorTable.ReferencedPartitionInfo> partitions) {
TTableDescriptor tTableDescriptor = new TTableDescriptor(id, TTableType.TABLE_FUNCTION_TABLE, fullSchema.size(),
0, "_table_function_table", "_table_function_db");
TTableFunctionTable tTableFunctionTable = this.toTTableFunctionTable();
tTableDescriptor.setTableFunctionTable(tTableFunctionTable);
return tTableDescriptor;
}
public TTableFunctionTable toTTableFunctionTable() {
TTableFunctionTable tTableFunctionTable = new TTableFunctionTable();
List<TColumn> tColumns = getFullSchema().stream().map(Column::toThrift).collect(Collectors.toList());
tTableFunctionTable.setPath(path);
tTableFunctionTable.setColumns(tColumns);
tTableFunctionTable.setFile_format(format);
tTableFunctionTable.setWrite_single_file(writeSingleFile);
tTableFunctionTable.setCompression_type(PARQUET_COMPRESSION_TYPE_MAP.get(compressionType));
if (partitionColumnIDs != null) {
tTableFunctionTable.setPartition_column_ids(partitionColumnIDs);
}
return tTableFunctionTable;
}
public String getFormat() {
return format;
}
public String getPath() {
return path;
}
private void parseProperties() throws DdlException {
if (properties == null) {
throw new DdlException("Please set properties of table function");
}
path = properties.get(PROPERTY_PATH);
if (Strings.isNullOrEmpty(path)) {
throw new DdlException("path is null. Please add properties(path='xxx') when create table");
}
format = properties.get(PROPERTY_FORMAT);
if (Strings.isNullOrEmpty(format)) {
throw new DdlException("format is null. Please add properties(format='xxx') when create table");
}
if (!format.equalsIgnoreCase("parquet") && !format.equalsIgnoreCase("orc")) {
throw new DdlException("not supported format: " + format);
}
String colsFromPathProp = properties.get(PROPERTY_COLUMNS_FROM_PATH);
if (!Strings.isNullOrEmpty(colsFromPathProp)) {
String[] colsFromPath = colsFromPathProp.split(",");
for (String col : colsFromPath) {
columnsFromPath.add(col.trim());
}
}
}
private void parseFiles() throws DdlException {
try {
if (path.startsWith("fake:
TBrokerFileStatus file1 = new TBrokerFileStatus();
file1.isDir = false;
file1.path = "fake:
file1.size = 1024;
fileStatuses.add(file1);
TBrokerFileStatus file2 = new TBrokerFileStatus();
file2.isDir = false;
file2.path = "fake:
file2.size = 2048;
fileStatuses.add(file2);
return;
}
List<String> pieces = Splitter.on(",").trimResults().omitEmptyStrings().splitToList(path);
for (String piece : ListUtils.emptyIfNull(pieces)) {
HdfsUtil.parseFile(piece, new BrokerDesc(properties), fileStatuses);
}
} catch (UserException e) {
LOG.error("parse files error", e);
throw new DdlException("failed to parse files", e);
}
if (fileStatuses.isEmpty()) {
throw new DdlException("no file found with given path pattern: " + path);
}
}
private PGetFileSchemaRequest getGetFileSchemaRequest(List<TBrokerFileStatus> filelist) throws TException {
TBrokerScanRangeParams params = new TBrokerScanRangeParams();
params.setUse_broker(false);
params.setSrc_slot_ids(new ArrayList<>());
params.setProperties(properties);
try {
THdfsProperties hdfsProperties = new THdfsProperties();
HdfsUtil.getTProperties(filelist.get(0).path, new BrokerDesc(properties), hdfsProperties);
params.setHdfs_properties(hdfsProperties);
} catch (UserException e) {
throw new TException("failed to parse files: " + e.getMessage());
}
TBrokerScanRange brokerScanRange = new TBrokerScanRange();
brokerScanRange.setParams(params);
brokerScanRange.setBroker_addresses(Lists.newArrayList());
TFileFormatType fileFormat;
switch (format.toLowerCase()) {
case "parquet":
fileFormat = TFileFormatType.FORMAT_PARQUET;
break;
case "orc":
fileFormat = TFileFormatType.FORMAT_ORC;
break;
default:
throw new TException("unsupported format: " + format);
}
for (int i = 0; i < filelist.size(); ++i) {
TBrokerRangeDesc rangeDesc = new TBrokerRangeDesc();
rangeDesc.setFile_type(TFileType.FILE_BROKER);
rangeDesc.setFormat_type(fileFormat);
rangeDesc.setPath(filelist.get(i).path);
rangeDesc.setSplittable(filelist.get(i).isSplitable);
rangeDesc.setStart_offset(0);
rangeDesc.setFile_size(filelist.get(i).size);
rangeDesc.setSize(filelist.get(i).size);
rangeDesc.setNum_of_columns_from_file(0);
rangeDesc.setColumns_from_path(new ArrayList<>());
brokerScanRange.addToRanges(rangeDesc);
}
TScanRange scanRange = new TScanRange();
scanRange.setBroker_scan_range(brokerScanRange);
final TGetFileSchemaRequest tRequest = new TGetFileSchemaRequest();
tRequest.setScan_range(scanRange);
final PGetFileSchemaRequest pRequest = new PGetFileSchemaRequest();
pRequest.setRequest(tRequest);
return pRequest;
}
private List<Column> getSchemaFromPath() throws DdlException {
List<Column> columns = new ArrayList<>();
if (!columnsFromPath.isEmpty()) {
for (String colName : columnsFromPath) {
Optional<Column> column = columns.stream().filter(col -> col.nameEquals(colName, false)).findFirst();
if (column.isPresent()) {
throw new DdlException("duplicated name in columns from path, " +
"a column with same name already exists in the file table: " + colName);
}
columns.add(new Column(colName, ScalarType.createDefaultString(), true));
}
}
return columns;
}
public List<ImportColumnDesc> getColumnExprList() {
List<ImportColumnDesc> exprs = new ArrayList<>();
List<Column> columns = super.getFullSchema();
for (Column column : columns) {
exprs.add(new ImportColumnDesc(column.getName()));
}
return exprs;
}
public List<String> getColumnsFromPath() {
return columnsFromPath;
}
@Override
public String toString() {
return String.format("TABLE('path'='%s', 'format'='%s')", path, format);
}
@Override
public boolean isSupported() {
return true;
}
@Override
public List<String> getPartitionColumnNames() {
if (partitionColumnIDs == null) {
return new ArrayList<>();
}
return partitionColumnIDs.stream().map(id -> fullSchema.get(id).getName()).collect(Collectors.toList());
}
public boolean isWriteSingleFile() {
return writeSingleFile;
}
} | class TableFunctionTable extends Table {
private static final Logger LOG = LogManager.getLogger(TableFunctionTable.class);
public static final String FAKE_PATH = "fake:
public static final String PROPERTY_PATH = "path";
public static final String PROPERTY_FORMAT = "format";
public static final String PROPERTY_COLUMNS_FROM_PATH = "columns_from_path";
private String path;
private String format;
private String compressionType;
private List<String> columnsFromPath = new ArrayList<>();
private final Map<String, String> properties;
@Nullable
private List<Integer> partitionColumnIDs;
private boolean writeSingleFile;
private List<TBrokerFileStatus> fileStatuses = Lists.newArrayList();
public TableFunctionTable(Map<String, String> properties) throws DdlException {
super(TableType.TABLE_FUNCTION);
super.setId(-1);
super.setName("table_function_table");
this.properties = properties;
parseProperties();
parseFiles();
List<Column> columns = new ArrayList<>();
if (path.startsWith(FAKE_PATH)) {
columns.add(new Column("col_int", Type.INT));
columns.add(new Column("col_string", Type.VARCHAR));
} else {
columns = getFileSchema();
}
columns.addAll(getSchemaFromPath());
setNewFullSchema(columns);
}
public TableFunctionTable(String path, String format, String compressionType, List<Column> columns,
@Nullable List<Integer> partitionColumnIDs, boolean writeSingleFile,
Map<String, String> properties) {
super(TableType.TABLE_FUNCTION);
verify(!Strings.isNullOrEmpty(path), "path is null or empty");
verify(!(partitionColumnIDs != null && writeSingleFile));
this.path = path;
this.format = format;
this.compressionType = compressionType;
this.partitionColumnIDs = partitionColumnIDs;
this.writeSingleFile = writeSingleFile;
this.properties = properties;
super.setNewFullSchema(columns);
}
@Override
public boolean supportInsert() {
return true;
}
public List<TBrokerFileStatus> fileList() {
return fileStatuses;
}
@Override
public Map<String, String> getProperties() {
return properties;
}
@Override
public TTableDescriptor toThrift(List<DescriptorTable.ReferencedPartitionInfo> partitions) {
TTableDescriptor tTableDescriptor = new TTableDescriptor(id, TTableType.TABLE_FUNCTION_TABLE, fullSchema.size(),
0, "_table_function_table", "_table_function_db");
TTableFunctionTable tTableFunctionTable = this.toTTableFunctionTable();
tTableDescriptor.setTableFunctionTable(tTableFunctionTable);
return tTableDescriptor;
}
public TTableFunctionTable toTTableFunctionTable() {
TTableFunctionTable tTableFunctionTable = new TTableFunctionTable();
List<TColumn> tColumns = getFullSchema().stream().map(Column::toThrift).collect(Collectors.toList());
tTableFunctionTable.setPath(path);
tTableFunctionTable.setColumns(tColumns);
tTableFunctionTable.setFile_format(format);
tTableFunctionTable.setWrite_single_file(writeSingleFile);
tTableFunctionTable.setCompression_type(PARQUET_COMPRESSION_TYPE_MAP.get(compressionType));
if (partitionColumnIDs != null) {
tTableFunctionTable.setPartition_column_ids(partitionColumnIDs);
}
return tTableFunctionTable;
}
public String getFormat() {
return format;
}
public String getPath() {
return path;
}
private void parseProperties() throws DdlException {
if (properties == null) {
throw new DdlException("Please set properties of table function");
}
path = properties.get(PROPERTY_PATH);
if (Strings.isNullOrEmpty(path)) {
throw new DdlException("path is null. Please add properties(path='xxx') when create table");
}
format = properties.get(PROPERTY_FORMAT);
if (Strings.isNullOrEmpty(format)) {
throw new DdlException("format is null. Please add properties(format='xxx') when create table");
}
if (!format.equalsIgnoreCase("parquet") && !format.equalsIgnoreCase("orc")) {
throw new DdlException("not supported format: " + format);
}
String colsFromPathProp = properties.get(PROPERTY_COLUMNS_FROM_PATH);
if (!Strings.isNullOrEmpty(colsFromPathProp)) {
String[] colsFromPath = colsFromPathProp.split(",");
for (String col : colsFromPath) {
columnsFromPath.add(col.trim());
}
}
}
private void parseFiles() throws DdlException {
try {
if (path.startsWith("fake:
TBrokerFileStatus file1 = new TBrokerFileStatus();
file1.isDir = false;
file1.path = "fake:
file1.size = 1024;
fileStatuses.add(file1);
TBrokerFileStatus file2 = new TBrokerFileStatus();
file2.isDir = false;
file2.path = "fake:
file2.size = 2048;
fileStatuses.add(file2);
return;
}
List<String> pieces = Splitter.on(",").trimResults().omitEmptyStrings().splitToList(path);
for (String piece : ListUtils.emptyIfNull(pieces)) {
HdfsUtil.parseFile(piece, new BrokerDesc(properties), fileStatuses);
}
} catch (UserException e) {
LOG.error("parse files error", e);
throw new DdlException("failed to parse files", e);
}
if (fileStatuses.isEmpty()) {
throw new DdlException("no file found with given path pattern: " + path);
}
}
private PGetFileSchemaRequest getGetFileSchemaRequest(List<TBrokerFileStatus> filelist) throws TException {
TBrokerScanRangeParams params = new TBrokerScanRangeParams();
params.setUse_broker(false);
params.setSrc_slot_ids(new ArrayList<>());
params.setProperties(properties);
try {
THdfsProperties hdfsProperties = new THdfsProperties();
HdfsUtil.getTProperties(filelist.get(0).path, new BrokerDesc(properties), hdfsProperties);
params.setHdfs_properties(hdfsProperties);
} catch (UserException e) {
throw new TException("failed to parse files: " + e.getMessage());
}
TBrokerScanRange brokerScanRange = new TBrokerScanRange();
brokerScanRange.setParams(params);
brokerScanRange.setBroker_addresses(Lists.newArrayList());
TFileFormatType fileFormat;
switch (format.toLowerCase()) {
case "parquet":
fileFormat = TFileFormatType.FORMAT_PARQUET;
break;
case "orc":
fileFormat = TFileFormatType.FORMAT_ORC;
break;
default:
throw new TException("unsupported format: " + format);
}
for (int i = 0; i < filelist.size(); ++i) {
TBrokerRangeDesc rangeDesc = new TBrokerRangeDesc();
rangeDesc.setFile_type(TFileType.FILE_BROKER);
rangeDesc.setFormat_type(fileFormat);
rangeDesc.setPath(filelist.get(i).path);
rangeDesc.setSplittable(filelist.get(i).isSplitable);
rangeDesc.setStart_offset(0);
rangeDesc.setFile_size(filelist.get(i).size);
rangeDesc.setSize(filelist.get(i).size);
rangeDesc.setNum_of_columns_from_file(0);
rangeDesc.setColumns_from_path(new ArrayList<>());
brokerScanRange.addToRanges(rangeDesc);
}
TScanRange scanRange = new TScanRange();
scanRange.setBroker_scan_range(brokerScanRange);
final TGetFileSchemaRequest tRequest = new TGetFileSchemaRequest();
tRequest.setScan_range(scanRange);
final PGetFileSchemaRequest pRequest = new PGetFileSchemaRequest();
pRequest.setRequest(tRequest);
return pRequest;
}
private List<Column> getSchemaFromPath() throws DdlException {
List<Column> columns = new ArrayList<>();
if (!columnsFromPath.isEmpty()) {
for (String colName : columnsFromPath) {
Optional<Column> column = columns.stream().filter(col -> col.nameEquals(colName, false)).findFirst();
if (column.isPresent()) {
throw new DdlException("duplicated name in columns from path, " +
"a column with same name already exists in the file table: " + colName);
}
columns.add(new Column(colName, ScalarType.createDefaultString(), true));
}
}
return columns;
}
public List<ImportColumnDesc> getColumnExprList() {
List<ImportColumnDesc> exprs = new ArrayList<>();
List<Column> columns = super.getFullSchema();
for (Column column : columns) {
exprs.add(new ImportColumnDesc(column.getName()));
}
return exprs;
}
public List<String> getColumnsFromPath() {
return columnsFromPath;
}
@Override
public String toString() {
return String.format("TABLE('path'='%s', 'format'='%s')", path, format);
}
@Override
public boolean isSupported() {
return true;
}
@Override
public List<String> getPartitionColumnNames() {
if (partitionColumnIDs == null) {
return new ArrayList<>();
}
return partitionColumnIDs.stream().map(id -> fullSchema.get(id).getName()).collect(Collectors.toList());
}
public boolean isWriteSingleFile() {
return writeSingleFile;
}
} |
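The review above asks for a test covering the new shared-data fallback in getFileSchema(). The FE test harness is not shown in this entry, so the following is only a minimal, self-contained JUnit-style sketch that distills the node-selection rule into a hypothetical helper; the helper name and shape are assumptions, not StarRocks API:

```java
// Minimal sketch of the requested test. The rule under test is a hypothetical
// distillation of the change above: in shared-data mode an empty backend list
// falls back to compute nodes instead of failing immediately.
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import org.junit.Test;

public class NodeFallbackRuleTest {

    // Hypothetical pure helper mirroring the new selection rule.
    static boolean shouldFallBackToComputeNodes(boolean sharedDataMode, boolean backendsEmpty) {
        return sharedDataMode && backendsEmpty;
    }

    @Test
    public void sharedDataModeFallsBackWhenNoBackendsAlive() {
        assertTrue(shouldFallBackToComputeNodes(true, true));
    }

    @Test
    public void sharedNothingModeDoesNotFallBack() {
        assertFalse(shouldFallBackToComputeNodes(false, true));
    }
}
```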
This can be replaced by ArrayDeque as well. | public Collection<String> process(String input) throws PreprocessorException {
List<String> snippets = new ArrayList<>();
StringBuilder builder = new StringBuilder();
Deque<Character> brackets = new ConcurrentLinkedDeque<>();
boolean isInBacktickLiteral = false;
boolean isInQuoteLiteral = false;
boolean isInComment = false;
for (int i = 0; i < input.length(); i++) {
char character = input.charAt(i);
if (isInComment && character == NEW_LINE) {
isInComment = false;
}
if (!isInComment) {
builder.append(character);
if (character == DOUBLE_QUOTE && isNotEscaped(input, i)) {
isInQuoteLiteral = !isInQuoteLiteral;
} else if (character == BACK_TICK && isNotEscaped(input, i)) {
isInBacktickLiteral = !isInBacktickLiteral;
}
if (!isInBacktickLiteral && !isInQuoteLiteral) {
if (isInComment = isCommentStart(input, i)) {
builder.deleteCharAt(builder.length() - 1);
} else if (character == SEMICOLON && brackets.isEmpty()) {
addToList(snippets, builder);
builder.setLength(0);
} else if (isOpeningBracket(character)) {
brackets.push(character);
} else if (!brackets.isEmpty() && isBracketPair(brackets.peek(), character)) {
brackets.pop();
} else if (isClosingBracket(character)) {
if (brackets.isEmpty()) {
addErrorDiagnostic("syntax error: found closing brackets but opening one not found.");
throw new PreprocessorException();
}
}
}
}
}
if (!builder.isEmpty()) {
addToList(snippets, builder);
}
return snippets;
} | Deque<Character> brackets = new ConcurrentLinkedDeque<>(); | public Collection<String> process(String input) throws PreprocessorException {
List<String> snippets = new ArrayList<>();
StringBuilder builder = new StringBuilder();
Deque<Character> brackets = new ArrayDeque<>();
boolean isInBacktickLiteral = false;
boolean isInQuoteLiteral = false;
boolean isInComment = false;
for (int i = 0; i < input.length(); i++) {
char character = input.charAt(i);
if (isInComment && character == NEW_LINE) {
isInComment = false;
}
if (!isInComment) {
builder.append(character);
if (character == DOUBLE_QUOTE && isNotEscaped(input, i)) {
isInQuoteLiteral = !isInQuoteLiteral;
} else if (character == BACK_TICK && isNotEscaped(input, i)) {
isInBacktickLiteral = !isInBacktickLiteral;
}
if (!isInBacktickLiteral && !isInQuoteLiteral) {
if (isInComment = isCommentStart(input, i)) {
builder.deleteCharAt(builder.length() - 1);
} else if (character == SEMICOLON && brackets.isEmpty()) {
addToList(snippets, builder);
builder.setLength(0);
} else if (isOpeningBracket(character)) {
brackets.push(character);
} else if (!brackets.isEmpty() && isBracketPair(brackets.peek(), character)) {
brackets.pop();
} else if (isClosingBracket(character)) {
if (brackets.isEmpty()) {
addErrorDiagnostic("syntax error: found closing brackets but opening one not found.");
throw new PreprocessorException();
}
}
}
}
}
if (!builder.isEmpty()) {
addToList(snippets, builder);
}
return snippets;
} | class SeparatorPreprocessor extends Preprocessor {
private static final char ESCAPE_CHAR = '\\';
private static final char BACK_TICK = '`';
private static final char DOUBLE_QUOTE = '\"';
private static final char SEMICOLON = ';';
private static final char PARENTHESIS_OPEN = '(';
private static final char PARENTHESIS_CLOSE = ')';
private static final char SQUARE_BR_OPEN = '[';
private static final char SQUARE_BR_CLOSE = ']';
private static final char CURLY_BR_OPEN = '{';
private static final char CURLY_BR_CLOSE = '}';
private static final char COMMENT_START = '/';
private static final char NEW_LINE = '\n';
@Override
/**
* Adds the builders string representation
* to stack if string is not empty.
* Adds a semicolon to end if not present.
*
* @param stack Stack list.
* @param builder Builder to add.
*/
private void addToList(List<String> stack, StringBuilder builder) {
String string = builder.toString().trim();
if (string.isBlank()) {
return;
}
if (!string.endsWith(String.valueOf(SEMICOLON))) {
string = string + SEMICOLON;
}
if (string.length() == 1) {
return;
}
stack.add(string);
}
/**
* Whether a comment started.
*
* @param input Whole input string.
* @param position Position of character to check.
* @return Whether a comment started.
*/
private boolean isCommentStart(String input, int position) {
return position < input.length() - 1
&& input.charAt(position) == COMMENT_START
&& input.charAt(position + 1) == COMMENT_START;
}
/**
* Whether the character at the given position was not escaped or not.
*
* @param input Whole input string.
* @param position Position of character to check.
* @return Whether the character was escaped.
*/
private boolean isNotEscaped(String input, int position) {
return position <= 0 || input.charAt(position - 1) != ESCAPE_CHAR;
}
/**
* Whether the character is a opening bracket type.
*
* @param character Character to check.
* @return Whether the input is a opening bracket.
*/
private boolean isOpeningBracket(char character) {
return character == PARENTHESIS_OPEN
|| character == SQUARE_BR_OPEN
|| character == CURLY_BR_OPEN;
}
/**
* Whether the character is a closing bracket type.
*
* @param character Character to check.
* @return Whether the input is a closing bracket.
*/
private boolean isClosingBracket(char character) {
return character == PARENTHESIS_CLOSE
|| character == SQUARE_BR_CLOSE
|| character == CURLY_BR_CLOSE;
}
/**
* Whether the inputs resemble a pair of brackets.
*
* @param opening Opening bracket.
* @param closing Closing bracket.
* @return Whether the opening/closing brackets are matching brackets.
*/
private boolean isBracketPair(char opening, char closing) {
return (opening == PARENTHESIS_OPEN && closing == PARENTHESIS_CLOSE)
|| (opening == SQUARE_BR_OPEN && closing == SQUARE_BR_CLOSE)
|| (opening == CURLY_BR_OPEN && closing == CURLY_BR_CLOSE);
}
} | class SeparatorPreprocessor extends Preprocessor {
private static final char ESCAPE_CHAR = '\\';
private static final char BACK_TICK = '`';
private static final char DOUBLE_QUOTE = '\"';
private static final char SEMICOLON = ';';
private static final char PARENTHESIS_OPEN = '(';
private static final char PARENTHESIS_CLOSE = ')';
private static final char SQUARE_BR_OPEN = '[';
private static final char SQUARE_BR_CLOSE = ']';
private static final char CURLY_BR_OPEN = '{';
private static final char CURLY_BR_CLOSE = '}';
private static final char COMMENT_START = '/';
private static final char NEW_LINE = '\n';
@Override
/**
* Adds the builders string representation
* to stack if string is not empty.
* Adds a semicolon to end if not present.
*
* @param stack Stack list.
* @param builder Builder to add.
*/
private void addToList(List<String> stack, StringBuilder builder) {
String string = builder.toString().trim();
if (string.isBlank()) {
return;
}
if (!string.endsWith(String.valueOf(SEMICOLON))) {
string = string + SEMICOLON;
}
if (string.length() == 1) {
return;
}
stack.add(string);
}
/**
* Whether a comment started.
*
* @param input Whole input string.
* @param position Position of character to check.
* @return Whether a comment started.
*/
private boolean isCommentStart(String input, int position) {
return position < input.length() - 1
&& input.charAt(position) == COMMENT_START
&& input.charAt(position + 1) == COMMENT_START;
}
/**
* Whether the character at the given position was not escaped or not.
*
* @param input Whole input string.
* @param position Position of character to check.
* @return Whether the character was escaped.
*/
private boolean isNotEscaped(String input, int position) {
return position <= 0 || input.charAt(position - 1) != ESCAPE_CHAR;
}
/**
* Whether the character is a opening bracket type.
*
* @param character Character to check.
* @return Whether the input is a opening bracket.
*/
private boolean isOpeningBracket(char character) {
return character == PARENTHESIS_OPEN
|| character == SQUARE_BR_OPEN
|| character == CURLY_BR_OPEN;
}
/**
* Whether the character is a closing bracket type.
*
* @param character Character to check.
* @return Whether the input is a closing bracket.
*/
private boolean isClosingBracket(char character) {
return character == PARENTHESIS_CLOSE
|| character == SQUARE_BR_CLOSE
|| character == CURLY_BR_CLOSE;
}
/**
* Whether the inputs resemble a pair of brackets.
*
* @param opening Opening bracket.
* @param closing Closing bracket.
* @return Whether the opening/closing brackets are matching brackets.
*/
private boolean isBracketPair(char opening, char closing) {
return (opening == PARENTHESIS_OPEN && closing == PARENTHESIS_CLOSE)
|| (opening == SQUARE_BR_OPEN && closing == SQUARE_BR_CLOSE)
|| (opening == CURLY_BR_OPEN && closing == CURLY_BR_CLOSE);
}
} |
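The suggestion in the entry above swaps the concurrent deque for java.util.ArrayDeque, which supports the same push/peek/pop stack operations without synchronization overhead and fits here because the deque never leaves the enclosing method. A small standalone sketch of the same bracket-tracking idea (names and structure are illustrative, not taken from the entry):

```java
// Standalone illustration of bracket tracking with ArrayDeque used as a stack.
import java.util.ArrayDeque;
import java.util.Deque;

public class BracketStackExample {

    public static boolean isBalanced(String input) {
        Deque<Character> brackets = new ArrayDeque<>();
        for (char c : input.toCharArray()) {
            if (c == '(' || c == '[' || c == '{') {
                brackets.push(c);
            } else if (c == ')' || c == ']' || c == '}') {
                // A closing bracket must match the most recent opening bracket.
                if (brackets.isEmpty() || !matches(brackets.pop(), c)) {
                    return false;
                }
            }
        }
        return brackets.isEmpty();
    }

    private static boolean matches(char open, char close) {
        return (open == '(' && close == ')')
                || (open == '[' && close == ']')
                || (open == '{' && close == '}');
    }
}
```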
It seems `buildAuthenticatedRequest` can throw an exception; if so, `response.close();` won't be reached and the connection can leak. | public void applyCredentialsFilter(OkHttpClient.Builder clientBuilder) {
clientBuilder.addInterceptor(new Interceptor() {
@Override
public Response intercept(Chain chain) throws IOException {
Request originalRequest = chain.request();
HttpUrl url = chain.request().url();
Map<String, String> challengeMap = cache.getCachedChallenge(url);
Response response;
Pair<Request, HttpMessageSecurity> authenticatedRequestPair;
if (challengeMap != null) {
authenticatedRequestPair = buildAuthenticatedRequest(originalRequest, challengeMap);
} else {
response = chain.proceed(buildEmptyRequest(originalRequest));
if (response.code() != 401) {
return response;
}
authenticatedRequestPair = buildAuthenticatedRequest(originalRequest, response);
response.close();
}
response = chain.proceed(authenticatedRequestPair.getLeft());
if (response.code() == 200) {
return authenticatedRequestPair.getRight().unprotectResponse(response);
} else {
return response;
}
}
});
} | response.close(); | public void applyCredentialsFilter(OkHttpClient.Builder clientBuilder) {
clientBuilder.addInterceptor(new Interceptor() {
@Override
public Response intercept(Chain chain) throws IOException {
Request originalRequest = chain.request();
HttpUrl url = chain.request().url();
Map<String, String> challengeMap = cache.getCachedChallenge(url);
Response response;
Pair<Request, HttpMessageSecurity> authenticatedRequestPair;
if (challengeMap != null) {
authenticatedRequestPair = buildAuthenticatedRequest(originalRequest, challengeMap);
} else {
response = chain.proceed(buildEmptyRequest(originalRequest));
if (response.code() != 401) {
return response;
}
try {
authenticatedRequestPair = buildAuthenticatedRequest(originalRequest, response);
} finally {
response.close();
}
}
response = chain.proceed(authenticatedRequestPair.getLeft());
if (response.code() == 200) {
return authenticatedRequestPair.getRight().unprotectResponse(response);
} else {
return response;
}
}
});
} | class KeyVaultCredentials implements ServiceClientCredentials {
private static final String WWW_AUTHENTICATE = "WWW-Authenticate";
private static final String BEARER_TOKEP_REFIX = "Bearer ";
private static final String CLIENT_ENCRYPTION_KEY_TYPE = "RSA";
private static final int CLIENT_ENCRYPTION_KEY_SIZE = 2048;
private List<String> supportedMethods = Arrays.asList("sign", "verify", "encrypt", "decrypt", "wrapkey",
"unwrapkey");
private JsonWebKey clientEncryptionKey = null;
private final ChallengeCache cache = new ChallengeCache();
@Override
/**
* Builds request with authenticated header. Protects request body if supported.
*
* @param originalRequest
* unprotected request without auth token.
* @param challengeMap
* the challenge map.
* @return Pair of protected request and HttpMessageSecurity used for
* encryption.
*/
private Pair<Request, HttpMessageSecurity> buildAuthenticatedRequest(Request originalRequest,
Map<String, String> challengeMap) throws IOException {
Boolean supportsPop = supportsMessageProtection(originalRequest.url().toString(), challengeMap);
if (supportsPop && this.clientEncryptionKey == null) {
try {
final KeyPairGenerator generator = KeyPairGenerator.getInstance(CLIENT_ENCRYPTION_KEY_TYPE);
generator.initialize(CLIENT_ENCRYPTION_KEY_SIZE);
this.clientEncryptionKey = JsonWebKey.fromRSA(generator.generateKeyPair()).withKid(UUID.randomUUID().toString());
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
AuthenticationResult authResult = getAuthenticationCredentials(supportsPop, challengeMap);
if (authResult == null) {
return null;
}
HttpMessageSecurity httpMessageSecurity = new HttpMessageSecurity(authResult.getAuthToken(),
supportsPop ? authResult.getPopKey() : "",
supportsPop ? challengeMap.get("x-ms-message-encryption-key") : "",
supportsPop ? challengeMap.get("x-ms-message-signing-key") : "",
this.clientEncryptionKey);
Request request = httpMessageSecurity.protectRequest(originalRequest);
return Pair.of(request, httpMessageSecurity);
}
/**
* Builds request with authenticated header. Protects request body if supported.
*
* @param originalRequest
* unprotected request without auth token.
* @param response
* response with unauthorized return code.
* @return Pair of protected request and HttpMessageSecurity used for
* encryption.
*/
private Pair<Request, HttpMessageSecurity> buildAuthenticatedRequest(Request originalRequest, Response response)
throws IOException {
String authenticateHeader = response.header(WWW_AUTHENTICATE);
Map<String, String> challengeMap = extractChallenge(authenticateHeader, BEARER_TOKEP_REFIX);
challengeMap.put("x-ms-message-encryption-key", response.header("x-ms-message-encryption-key"));
challengeMap.put("x-ms-message-signing-key", response.header("x-ms-message-signing-key"));
cache.addCachedChallenge(originalRequest.url(), challengeMap);
return buildAuthenticatedRequest(originalRequest, challengeMap);
}
/**
* Removes request body used for EKV authorization.
*
* @param request
* unprotected request without auth token.
* @return request with removed body.
*/
private Request buildEmptyRequest(Request request) {
RequestBody body = RequestBody.create(MediaType.parse("application/json"), "{}");
if (request.method().equalsIgnoreCase("get")) {
return request;
} else {
return request.newBuilder().method(request.method(), body).build();
}
}
/**
* Checks if resource supports message protection.
*
* @param url
* resource url.
* @param challengeMap
* the challenge map.
* @return true if message protection is supported.
*/
private Boolean supportsMessageProtection(String url, Map<String, String> challengeMap) {
if (!"true".equals(challengeMap.get("supportspop"))) {
return false;
}
if (!url.toLowerCase().contains("/keys/")) {
return false;
}
String[] tokens = url.split("\\?")[0].split("/");
return supportedMethods.contains(tokens[tokens.length - 1]);
}
/**
* Extracts the authentication challenges from the challenge map and calls the
* authentication callback to get the bearer token and return it.
*
* @param supportsPop
* is resource supports pop authentication.
* @param challengeMap
* the challenge map.
* @return AuthenticationResult with bearer token and PoP key.
*/
private AuthenticationResult getAuthenticationCredentials(Boolean supportsPop, Map<String, String> challengeMap) {
String authorization = challengeMap.get("authorization");
if (authorization == null) {
authorization = challengeMap.get("authorization_uri");
}
String resource = challengeMap.get("resource");
String scope = challengeMap.get("scope");
String schema = supportsPop ? "pop" : "bearer";
return doAuthenticate(authorization, resource, scope, schema);
}
/**
* Extracts the challenge off the authentication header.
*
* @param authenticateHeader
* the authentication header containing all the challenges.
* @param authChallengePrefix
* the authentication challenge name.
* @return a challenge map.
*/
private static Map<String, String> extractChallenge(String authenticateHeader, String authChallengePrefix) {
if (!isValidChallenge(authenticateHeader, authChallengePrefix)) {
return null;
}
authenticateHeader = authenticateHeader.toLowerCase().replace(authChallengePrefix.toLowerCase(), "");
String[] challenges = authenticateHeader.split(", ");
Map<String, String> challengeMap = new HashMap<String, String>();
for (String pair : challenges) {
String[] keyValue = pair.split("=");
challengeMap.put(keyValue[0].replaceAll("\"", ""), keyValue[1].replaceAll("\"", ""));
}
return challengeMap;
}
/**
* Verifies whether a challenge is bearer or not.
*
* @param authenticateHeader
* the authentication header containing all the challenges.
* @param authChallengePrefix
* the authentication challenge name.
* @return
*/
private static boolean isValidChallenge(String authenticateHeader, String authChallengePrefix) {
if (authenticateHeader != null && !authenticateHeader.isEmpty()
&& authenticateHeader.toLowerCase().startsWith(authChallengePrefix.toLowerCase())) {
return true;
}
return false;
}
/**
* Abstract method to be implemented.
*
* @param authorization
* Identifier of the authority, a URL.
* @param resource
* Identifier of the target resource that is the recipient of the
* requested token, a URL.
*
* @param scope
* The scope of the authentication request.
*
* @return AuthenticationResult with authorization token and PoP key.
*
* Answers a server challenge with a token header.
* <p>
* Implementations typically use ADAL to get a token, as performed in
* the sample below:
* </p>
*
* <pre>
* &
* public String doAuthenticate(String authorization, String resource, String scope) {
* String clientId = ...;
* String clientKey = ...;
* AuthenticationResult token = getAccessTokenFromClientCredentials(authorization, resource, clientId, clientKey);
* return token.getAccessToken();;
* }
*
* private static AuthenticationResult getAccessTokenFromClientCredentials(String authorization, String resource, String clientId, String clientKey) {
* AuthenticationContext context = null;
* AuthenticationResult result = null;
* ExecutorService service = null;
* try {
* service = Executors.newFixedThreadPool(1);
* context = new AuthenticationContext(authorization, false, service);
* ClientCredential credentials = new ClientCredential(clientId, clientKey);
* Future<AuthenticationResult> future = context.acquireToken(resource, credentials, null);
* result = future.get();
* } catch (Exception e) {
* throw new RuntimeException(e);
* } finally {
* service.shutdown();
* }
*
* if (result == null) {
* throw new RuntimeException("authentication result was null");
* }
* return result;
* }
* </pre>
*
* <p>
* <b>Note: The client key must be securely stored. It's advised to use
* two client applications - one for development and other for
* production - managed by separate parties.</b>
* </p>
*
*/
public String doAuthenticate(String authorization, String resource, String scope) {
return "";
}
/**
* Method to be implemented.
*
* @param authorization
* Identifier of the authority, a URL.
* @param resource
* Identifier of the target resource that is the recipient of the
* requested token, a URL.
* @param scope
* The scope of the authentication request.
*
* @param schema
* Authentication schema. Can be 'pop' or 'bearer'.
*
* @return AuthenticationResult with authorization token and PoP key.
*
* Answers a server challenge with a token header.
* <p>
* Implementations sends POST request to receive authentication token
* like in example below. ADAL currently doesn't support POP
* authentication.
* </p>
*
* <pre>
* public AuthenticationResult doAuthenticate(String authorization, String resource, String scope, String schema) {
* JsonWebKey clientJwk = GenerateJsonWebKey();
* JsonWebKey clientPublicJwk = GetJwkWithPublicKeyOnly(clientJwk);
* String token = GetAccessToken(authorization, resource, "pop".equals(schema), clientPublicJwk);
*
* return new AuthenticationResult(token, clientJwk.toString());
* }
*
* private JsonWebKey GenerateJsonWebKey() {
* final KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA");
* generator.initialize(2048);
* KeyPair clientRsaKeyPair = generator.generateKeyPair();
* JsonWebKey result = JsonWebKey.fromRSA(clientRsaKeyPair);
* result.withKid(UUID.randomUUID().toString());
* return result;
* }
*
* public static JsonWebKey GetJwkWithPublicKeyOnly(JsonWebKey jwk) {
* KeyPair publicOnly = jwk.toRSA(false);
* JsonWebKey jsonkeyPublic = JsonWebKey.fromRSA(publicOnly);
* jsonkeyPublic.withKid(jwk.kid());
* jsonkeyPublic.withKeyOps(Arrays.asList(JsonWebKeyOperation.ENCRYPT, JsonWebKeyOperation.WRAP_KEY,
* JsonWebKeyOperation.VERIFY));
* return jsonkeyPublic;
* }
*
* private String GetAccessToken(String authorization, String resource, boolean supportspop, JsonWebKey jwkPublic) {
* CloseableHttpClient httpclient = HttpClients.createDefault();
* HttpPost httppost = new HttpPost(authorization + "/oauth2/token");
*
*
* List<NameValuePair> params = new ArrayList<NameValuePair>(2);
* params.add(new BasicNameValuePair("resource", resource));
* params.add(new BasicNameValuePair("response_type", "token"));
* params.add(new BasicNameValuePair("grant_type", "client_credentials"));
* params.add(new BasicNameValuePair("client_id", this.getApplicationId()));
* params.add(new BasicNameValuePair("client_secret", this.getApplicationSecret()));
*
* if (supportspop) {
* params.add(new BasicNameValuePair("pop_jwk", jwkPublic.toString()));
* }
*
* httppost.setEntity(new UrlEncodedFormEntity(params, "UTF-8"));
*
* HttpResponse response = httpclient.execute(httppost);
* HttpEntity entity = response.getEntity();
*
*
* String content = EntityUtils.toString(entity);
*
* ObjectMapper mapper = new ObjectMapper();
* authreply reply = mapper.readValue(content, authreply.class);
*
* return reply.access_token;
* }
* </pre>
*
* <p>
* <b>Note: The client key must be securely stored. It's advised to use
* two client applications - one for development and other for
* production - managed by separate parties.</b>
* </p>
*/
public AuthenticationResult doAuthenticate(String authorization, String resource, String scope, String schema) {
return new AuthenticationResult(doAuthenticate(authorization, resource, scope), "");
}
} | class KeyVaultCredentials implements ServiceClientCredentials {
private static final String WWW_AUTHENTICATE = "WWW-Authenticate";
private static final String BEARER_TOKEP_REFIX = "Bearer ";
private static final String CLIENT_ENCRYPTION_KEY_TYPE = "RSA";
private static final int CLIENT_ENCRYPTION_KEY_SIZE = 2048;
private List<String> supportedMethods = Arrays.asList("sign", "verify", "encrypt", "decrypt", "wrapkey",
"unwrapkey");
private JsonWebKey clientEncryptionKey = null;
private final ChallengeCache cache = new ChallengeCache();
@Override
/**
* Builds request with authenticated header. Protects request body if supported.
*
* @param originalRequest
* unprotected request without auth token.
* @param challengeMap
* the challenge map.
* @return Pair of protected request and HttpMessageSecurity used for
* encryption.
*/
private Pair<Request, HttpMessageSecurity> buildAuthenticatedRequest(Request originalRequest,
Map<String, String> challengeMap) throws IOException {
Boolean supportsPop = supportsMessageProtection(originalRequest.url().toString(), challengeMap);
if (supportsPop && this.clientEncryptionKey == null) {
try {
final KeyPairGenerator generator = KeyPairGenerator.getInstance(CLIENT_ENCRYPTION_KEY_TYPE);
generator.initialize(CLIENT_ENCRYPTION_KEY_SIZE);
this.clientEncryptionKey = JsonWebKey.fromRSA(generator.generateKeyPair()).withKid(UUID.randomUUID().toString());
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
AuthenticationResult authResult = getAuthenticationCredentials(supportsPop, challengeMap);
if (authResult == null) {
return null;
}
HttpMessageSecurity httpMessageSecurity = new HttpMessageSecurity(authResult.getAuthToken(),
supportsPop ? authResult.getPopKey() : "",
supportsPop ? challengeMap.get("x-ms-message-encryption-key") : "",
supportsPop ? challengeMap.get("x-ms-message-signing-key") : "",
this.clientEncryptionKey);
Request request = httpMessageSecurity.protectRequest(originalRequest);
return Pair.of(request, httpMessageSecurity);
}
/**
* Builds request with authenticated header. Protects request body if supported.
*
* @param originalRequest
* unprotected request without auth token.
* @param response
* response with unauthorized return code.
* @return Pair of protected request and HttpMessageSecurity used for
* encryption.
*/
private Pair<Request, HttpMessageSecurity> buildAuthenticatedRequest(Request originalRequest, Response response)
throws IOException {
String authenticateHeader = response.header(WWW_AUTHENTICATE);
Map<String, String> challengeMap = extractChallenge(authenticateHeader, BEARER_TOKEP_REFIX);
challengeMap.put("x-ms-message-encryption-key", response.header("x-ms-message-encryption-key"));
challengeMap.put("x-ms-message-signing-key", response.header("x-ms-message-signing-key"));
cache.addCachedChallenge(originalRequest.url(), challengeMap);
return buildAuthenticatedRequest(originalRequest, challengeMap);
}
/**
* Removes request body used for EKV authorization.
*
* @param request
* unprotected request without auth token.
* @return request with removed body.
*/
private Request buildEmptyRequest(Request request) {
RequestBody body = RequestBody.create(MediaType.parse("application/json"), "{}");
if (request.method().equalsIgnoreCase("get")) {
return request;
} else {
return request.newBuilder().method(request.method(), body).build();
}
}
/**
* Checks if resource supports message protection.
*
* @param url
* resource url.
* @param challengeMap
* the challenge map.
* @return true if message protection is supported.
*/
private Boolean supportsMessageProtection(String url, Map<String, String> challengeMap) {
if (!"true".equals(challengeMap.get("supportspop"))) {
return false;
}
if (!url.toLowerCase().contains("/keys/")) {
return false;
}
String[] tokens = url.split("\\?")[0].split("/");
return supportedMethods.contains(tokens[tokens.length - 1]);
}
/**
* Extracts the authentication challenges from the challenge map and calls the
* authentication callback to get the bearer token and return it.
*
* @param supportsPop
* is resource supports pop authentication.
* @param challengeMap
* the challenge map.
* @return AuthenticationResult with bearer token and PoP key.
*/
private AuthenticationResult getAuthenticationCredentials(Boolean supportsPop, Map<String, String> challengeMap) {
String authorization = challengeMap.get("authorization");
if (authorization == null) {
authorization = challengeMap.get("authorization_uri");
}
String resource = challengeMap.get("resource");
String scope = challengeMap.get("scope");
String schema = supportsPop ? "pop" : "bearer";
return doAuthenticate(authorization, resource, scope, schema);
}
/**
* Extracts the challenge off the authentication header.
*
* @param authenticateHeader
* the authentication header containing all the challenges.
* @param authChallengePrefix
* the authentication challenge name.
* @return a challenge map.
*/
private static Map<String, String> extractChallenge(String authenticateHeader, String authChallengePrefix) {
if (!isValidChallenge(authenticateHeader, authChallengePrefix)) {
return null;
}
authenticateHeader = authenticateHeader.toLowerCase().replace(authChallengePrefix.toLowerCase(), "");
String[] challenges = authenticateHeader.split(", ");
Map<String, String> challengeMap = new HashMap<String, String>();
for (String pair : challenges) {
String[] keyValue = pair.split("=");
challengeMap.put(keyValue[0].replaceAll("\"", ""), keyValue[1].replaceAll("\"", ""));
}
return challengeMap;
}
/**
* Verifies whether a challenge is bearer or not.
*
* @param authenticateHeader
* the authentication header containing all the challenges.
* @param authChallengePrefix
* the authentication challenge name.
* @return
*/
private static boolean isValidChallenge(String authenticateHeader, String authChallengePrefix) {
if (authenticateHeader != null && !authenticateHeader.isEmpty()
&& authenticateHeader.toLowerCase().startsWith(authChallengePrefix.toLowerCase())) {
return true;
}
return false;
}
/**
* Abstract method to be implemented.
*
* @param authorization
* Identifier of the authority, a URL.
* @param resource
* Identifier of the target resource that is the recipient of the
* requested token, a URL.
*
* @param scope
* The scope of the authentication request.
*
* @return AuthenticationResult with authorization token and PoP key.
*
* Answers a server challenge with a token header.
* <p>
* Implementations typically use ADAL to get a token, as performed in
* the sample below:
* </p>
*
* <pre>
* @Override
* public String doAuthenticate(String authorization, String resource, String scope) {
* String clientId = ...;
* String clientKey = ...;
* AuthenticationResult token = getAccessTokenFromClientCredentials(authorization, resource, clientId, clientKey);
* return token.getAccessToken();
* }
*
* private static AuthenticationResult getAccessTokenFromClientCredentials(String authorization, String resource, String clientId, String clientKey) {
* AuthenticationContext context = null;
* AuthenticationResult result = null;
* ExecutorService service = null;
* try {
* service = Executors.newFixedThreadPool(1);
* context = new AuthenticationContext(authorization, false, service);
* ClientCredential credentials = new ClientCredential(clientId, clientKey);
* Future<AuthenticationResult> future = context.acquireToken(resource, credentials, null);
* result = future.get();
* } catch (Exception e) {
* throw new RuntimeException(e);
* } finally {
* service.shutdown();
* }
*
* if (result == null) {
* throw new RuntimeException("authentication result was null");
* }
* return result;
* }
* </pre>
*
* <p>
* <b>Note: The client key must be securely stored. It's advised to use
* two client applications - one for development and the other for
* production - managed by separate parties.</b>
* </p>
*
*/
public String doAuthenticate(String authorization, String resource, String scope) {
return "";
}
/**
* Method to be implemented.
*
* @param authorization
* Identifier of the authority, a URL.
* @param resource
* Identifier of the target resource that is the recipient of the
* requested token, a URL.
* @param scope
* The scope of the authentication request.
*
* @param schema
* Authentication schema. Can be 'pop' or 'bearer'.
*
* @return AuthenticationResult with authorization token and PoP key.
*
* Answers a server challenge with a token header.
* <p>
* Implementations send a POST request to receive an authentication token,
* as in the example below. ADAL currently doesn't support POP
* authentication.
* </p>
*
* <pre>
* public AuthenticationResult doAuthenticate(String authorization, String resource, String scope, String schema) {
* JsonWebKey clientJwk = GenerateJsonWebKey();
* JsonWebKey clientPublicJwk = GetJwkWithPublicKeyOnly(clientJwk);
* String token = GetAccessToken(authorization, resource, "pop".equals(schema), clientPublicJwk);
*
* return new AuthenticationResult(token, clientJwk.toString());
* }
*
* private JsonWebKey GenerateJsonWebKey() {
* final KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA");
* generator.initialize(2048);
* KeyPair clientRsaKeyPair = generator.generateKeyPair();
* JsonWebKey result = JsonWebKey.fromRSA(clientRsaKeyPair);
* result.withKid(UUID.randomUUID().toString());
* return result;
* }
*
* public static JsonWebKey GetJwkWithPublicKeyOnly(JsonWebKey jwk) {
* KeyPair publicOnly = jwk.toRSA(false);
* JsonWebKey jsonkeyPublic = JsonWebKey.fromRSA(publicOnly);
* jsonkeyPublic.withKid(jwk.kid());
* jsonkeyPublic.withKeyOps(Arrays.asList(JsonWebKeyOperation.ENCRYPT, JsonWebKeyOperation.WRAP_KEY,
* JsonWebKeyOperation.VERIFY));
* return jsonkeyPublic;
* }
*
* private String GetAccessToken(String authorization, String resource, boolean supportspop, JsonWebKey jwkPublic) {
* CloseableHttpClient httpclient = HttpClients.createDefault();
* HttpPost httppost = new HttpPost(authorization + "/oauth2/token");
*
*
* List<NameValuePair> params = new ArrayList<NameValuePair>(2);
* params.add(new BasicNameValuePair("resource", resource));
* params.add(new BasicNameValuePair("response_type", "token"));
* params.add(new BasicNameValuePair("grant_type", "client_credentials"));
* params.add(new BasicNameValuePair("client_id", this.getApplicationId()));
* params.add(new BasicNameValuePair("client_secret", this.getApplicationSecret()));
*
* if (supportspop) {
* params.add(new BasicNameValuePair("pop_jwk", jwkPublic.toString()));
* }
*
* httppost.setEntity(new UrlEncodedFormEntity(params, "UTF-8"));
*
* HttpResponse response = httpclient.execute(httppost);
* HttpEntity entity = response.getEntity();
*
*
* String content = EntityUtils.toString(entity);
*
* ObjectMapper mapper = new ObjectMapper();
* authreply reply = mapper.readValue(content, authreply.class);
*
* return reply.access_token;
* }
* </pre>
*
* <p>
* <b>Note: The client key must be securely stored. It's advised to use
* two client applications - one for development and the other for
* production - managed by separate parties.</b>
* </p>
*/
public AuthenticationResult doAuthenticate(String authorization, String resource, String scope, String schema) {
return new AuthenticationResult(doAuthenticate(authorization, resource, scope), "");
}
} |
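A hedged, standalone illustration of the challenge parsing performed by extractChallenge above; the header value, URLs, and the class name ChallengeParseDemo are invented for the example:
// Mirrors extractChallenge: drop the "Bearer " prefix, then split key="value" pairs into a map.
import java.util.HashMap;
import java.util.Map;

class ChallengeParseDemo {
    public static void main(String[] args) {
        String header = "Bearer authorization=\"https://login.example.com/tenant\", resource=\"https://vault.example.net\"";
        String body = header.toLowerCase().replace("bearer ", "");
        Map<String, String> challenge = new HashMap<>();
        for (String pair : body.split(", ")) {
            String[] keyValue = pair.split("=");
            challenge.put(keyValue[0].replaceAll("\"", ""), keyValue[1].replaceAll("\"", ""));
        }
        // e.g. {authorization=https://login.example.com/tenant, resource=https://vault.example.net} (map order not guaranteed)
        System.out.println(challenge);
    }
}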
```suggestion .collect(Collectors.joining(", ", "(", ")")); ``` | public String toSql() {
return compareExpr.toSql() + " IN " + optionsList.stream()
.map(Expression::toSql)
.collect(Collectors.joining(",", "(", ")"));
} | .collect(Collectors.joining(",", "(", ")")); | public String toSql() {
return compareExpr.toSql() + " IN " + options.stream()
.map(Expression::toSql)
.collect(Collectors.joining(", ", "(", ")"));
} | class InPredicate extends Expression {
private Expression compareExpr;
private List<Expression> optionsList;
public InPredicate(Expression compareExpr, List<Expression> optionsList) {
super(new Builder<Expression>().add(compareExpr).addAll(optionsList).build().toArray(new Expression[0]));
this.compareExpr = Objects.requireNonNull(compareExpr, "Compare Expr cannot be null");
this.optionsList = ImmutableList.copyOf(Objects.requireNonNull(optionsList, "In list cannot be null"));
}
public <R, C> R accept(ExpressionVisitor<R, C> visitor, C context) {
return visitor.visitInPredicate(this, context);
}
@Override
public DataType getDataType() throws UnboundException {
return BooleanType.INSTANCE;
}
@Override
public boolean nullable() throws UnboundException {
return children().stream().anyMatch(Expression::nullable);
}
@Override
public String toString() {
return compareExpr + " IN " + optionsList.stream()
.map(Expression::toString)
.collect(Collectors.joining(",", "(", ")"));
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InPredicate that = (InPredicate) o;
return Objects.equals(compareExpr, that.getCompareExpr())
&& Objects.equals(optionsList, that.getOptionsList());
}
@Override
public int hashCode() {
return Objects.hash(compareExpr, optionsList);
}
public Expression getCompareExpr() {
return compareExpr;
}
public List<Expression> getOptionsList() {
return optionsList;
}
} | class InPredicate extends Expression {
private final Expression compareExpr;
private final List<Expression> options;
public InPredicate(Expression compareExpr, List<Expression> optionsList) {
super(new Builder<Expression>().add(compareExpr).addAll(optionsList).build().toArray(new Expression[0]));
this.compareExpr = Objects.requireNonNull(compareExpr, "Compare Expr cannot be null");
this.options = ImmutableList.copyOf(Objects.requireNonNull(optionsList, "In list cannot be null"));
}
public <R, C> R accept(ExpressionVisitor<R, C> visitor, C context) {
return visitor.visitInPredicate(this, context);
}
@Override
public DataType getDataType() throws UnboundException {
return BooleanType.INSTANCE;
}
@Override
public boolean nullable() throws UnboundException {
return children().stream().anyMatch(Expression::nullable);
}
@Override
public Expression withChildren(List<Expression> children) {
Preconditions.checkArgument(children.size() > 1);
return new InPredicate(children.get(0), ImmutableList.copyOf(children).subList(1, children.size()));
}
@Override
public String toString() {
return compareExpr + " IN " + options.stream()
.map(Expression::toString)
.collect(Collectors.joining(", ", "(", ")"));
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InPredicate that = (InPredicate) o;
return Objects.equals(compareExpr, that.getCompareExpr())
&& Objects.equals(options, that.getOptions());
}
@Override
public int hashCode() {
return Objects.hash(compareExpr, options);
}
public Expression getCompareExpr() {
return compareExpr;
}
public List<Expression> getOptions() {
return options;
}
} |
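The suggestion above only changes the join delimiter; Collectors.joining(delimiter, prefix, suffix) decides exactly how the rendered IN list looks. A minimal standalone sketch (the class name JoiningDemo and the sample values are illustrative only):
import java.util.List;
import java.util.stream.Collectors;

class JoiningDemo {
    public static void main(String[] args) {
        List<String> options = List.of("1", "2", "3");
        // "," yields (1,2,3); ", " yields (1, 2, 3), the spacing the reviewer asks for in toSql
        System.out.println(options.stream().collect(Collectors.joining(",", "(", ")")));
        System.out.println(options.stream().collect(Collectors.joining(", ", "(", ")")));
    }
}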
Resolving as per the offline discussion. | public void visit(BLangLetExpression letExpression) {
SymbolEnv prevEnv = this.env;
letExpression.env.enclInvokable = this.env.enclInvokable;
this.env = letExpression.env;
BLangExpression expr = letExpression.expr;
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(letExpression.pos);
blockStmt.scope = letExpression.env.scope;
for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
BLangNode node = rewrite((BLangNode) letVariable.definitionNode, env);
if (node.getKind() == NodeKind.BLOCK) {
blockStmt.stmts.addAll(((BLangBlockStmt) node).stmts);
} else {
blockStmt.addStatement((BLangSimpleVariableDef) node);
}
}
BLangSimpleVariableDef tempVarDef = createVarDef(String.format("$let_var_%d_$", letCount++),
expr.getBType(), expr, expr.pos);
BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempVarDef.var.symbol);
blockStmt.addStatement(tempVarDef);
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
stmtExpr.setBType(expr.getBType());
result = rewrite(stmtExpr, env);
this.env = prevEnv;
} | letExpression.env.enclInvokable = this.env.enclInvokable; | public void visit(BLangLetExpression letExpression) {
SymbolEnv prevEnv = this.env;
letExpression.env.enclInvokable = this.env.enclInvokable;
this.env = letExpression.env;
BLangExpression expr = letExpression.expr;
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(letExpression.pos);
blockStmt.scope = letExpression.env.scope;
for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
BLangNode node = rewrite((BLangNode) letVariable.definitionNode, env);
if (node.getKind() == NodeKind.BLOCK) {
blockStmt.stmts.addAll(((BLangBlockStmt) node).stmts);
} else {
blockStmt.addStatement((BLangSimpleVariableDef) node);
}
}
BLangSimpleVariableDef tempVarDef = createVarDef(String.format("$let_var_%d_$", letCount++),
expr.getBType(), expr, expr.pos);
BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempVarDef.var.symbol);
blockStmt.addStatement(tempVarDef);
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
stmtExpr.setBType(expr.getBType());
result = rewrite(stmtExpr, env);
this.env = prevEnv;
} | class definition node for which the initializer is created
* @param env The env for the type node
* @return The generated initializer method
*/
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
if (classDefinition.initFunction == null) {
return generatedInitFunc;
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
} | class definition node for which the initializer is created
* @param env The env for the type node
* @return The generated initializer method
*/
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
if (classDefinition.initFunction == null) {
return generatedInitFunc;
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
} |
Thanks, we'll need to add proper tests for this. | private DefaultArtifactSources processJarPluginExecutionConfig(PluginExecution e, boolean test) {
final Object config = e.getConfiguration();
if (config == null || !(config instanceof Xpp3Dom)) {
return null;
}
Xpp3Dom dom = (Xpp3Dom) config;
PathFilter filter = null;
final Xpp3Dom includes = dom.getChild("includes");
if (includes != null) {
final Xpp3Dom[] includeElements = includes.getChildren();
final List<String> list = new ArrayList<>(includeElements.length);
for (Xpp3Dom include : includeElements) {
list.add(include.getValue());
}
filter = PathFilter.forIncludes(list);
} else {
final Xpp3Dom excludes = dom.getChild("excludes");
if (excludes != null) {
final Xpp3Dom[] excludeElements = excludes.getChildren();
final List<String> list = new ArrayList<>(excludeElements.length);
for (Xpp3Dom exclude : excludes.getChildren()) {
list.add(exclude.getValue());
}
filter = PathFilter.forExcludes(list);
}
}
if (filter != null) {
final String classifier = getClassifier(dom, test);
final DefaultArtifactSources src = new DefaultArtifactSources(classifier);
src.addSources(
new DefaultSourceDir(new DirectoryPathTree(test ? getTestSourcesSourcesDir() : getSourcesSourcesDir()),
new DirectoryPathTree(test ? getTestClassesDir() : getClassesDir(), filter),
Collections.emptyMap()));
if (test) {
addTestResources(src, filter);
} else {
addMainResources(src, filter);
}
return src;
}
return null;
} | } else { | private DefaultArtifactSources processJarPluginExecutionConfig(PluginExecution e, boolean test) {
final Object config = e.getConfiguration();
if (config == null || !(config instanceof Xpp3Dom)) {
return null;
}
Xpp3Dom dom = (Xpp3Dom) config;
final List<String> includes = collectChildValues(dom.getChild("includes"));
final List<String> excludes = collectChildValues(dom.getChild("excludes"));
if (includes == null && excludes == null) {
return null;
}
final PathFilter filter = new PathFilter(includes, excludes);
final String classifier = getClassifier(dom, test);
final Collection<SourceDir> sources = Collections.singletonList(
new DefaultSourceDir(new DirectoryPathTree(test ? getTestSourcesSourcesDir() : getSourcesSourcesDir()),
new DirectoryPathTree(test ? getTestClassesDir() : getClassesDir(), filter),
Collections.emptyMap()));
final Collection<SourceDir> resources = test ? collectTestResources(filter) : collectMainResources(filter);
return new DefaultArtifactSources(classifier, sources, resources);
} | class LocalProject {
public static final String PROJECT_GROUPID = "${project.groupId}";
private static final String PROJECT_BASEDIR = "${project.basedir}";
private static final String PROJECT_BUILD_DIR = "${project.build.directory}";
private static final String PROJECT_OUTPUT_DIR = "${project.build.outputDirectory}";
public static final String POM_XML = "pom.xml";
public static LocalProject load(Path path) throws BootstrapMavenException {
return load(path, true);
}
public static LocalProject load(Path path, boolean required) throws BootstrapMavenException {
final Path pom = locateCurrentProjectPom(path, required);
if (pom == null) {
return null;
}
try {
return new LocalProject(readModel(pom), null);
} catch (UnresolvedVersionException e) {
return loadWorkspace(pom);
}
}
public static LocalProject loadWorkspace(Path path) throws BootstrapMavenException {
return loadWorkspace(path, true);
}
public static LocalProject loadWorkspace(Path path, boolean required) throws BootstrapMavenException {
try {
return new WorkspaceLoader(null, path.normalize().toAbsolutePath()).load();
} catch (Exception e) {
if (required) {
throw e;
}
return null;
}
}
/**
* Loads the workspace the current project belongs to.
* If current project does not exist then the method will return null.
*
* @param ctx bootstrap maven context
* @return current project with the workspace or null in case the current project could not be resolved
* @throws BootstrapMavenException in case of an error
*/
public static LocalProject loadWorkspace(BootstrapMavenContext ctx) throws BootstrapMavenException {
final Path currentProjectPom = ctx.getCurrentProjectPomOrNull();
if (currentProjectPom == null) {
return null;
}
final Path rootProjectBaseDir = ctx.getRootProjectBaseDir();
final WorkspaceLoader wsLoader = new WorkspaceLoader(ctx, currentProjectPom);
if (rootProjectBaseDir != null && !rootProjectBaseDir.equals(currentProjectPom.getParent())) {
wsLoader.setWorkspaceRootPom(rootProjectBaseDir.resolve(POM_XML));
}
return wsLoader.load();
}
static final Model readModel(Path pom) throws BootstrapMavenException {
try {
final Model model = ModelUtils.readModel(pom);
model.setPomFile(pom.toFile());
return model;
} catch (IOException e) {
throw new BootstrapMavenException("Failed to read " + pom, e);
}
}
static Path locateCurrentProjectPom(Path path, boolean required) throws BootstrapMavenException {
Path p = path;
while (p != null) {
final Path pom = p.resolve(POM_XML);
if (Files.exists(pom)) {
return pom;
}
p = p.getParent();
}
if (required) {
throw new BootstrapMavenException("Failed to locate project pom.xml for " + path);
}
return null;
}
private final Model rawModel;
private final String groupId;
private final String artifactId;
private String version;
private final Path dir;
private final LocalWorkspace workspace;
final List<LocalProject> modules = new ArrayList<>(0);
private AppArtifactKey key;
private final ModelBuildingResult modelBuildingResult;
private WorkspaceModule module;
LocalProject(ModelBuildingResult modelBuildingResult, LocalWorkspace workspace) {
this.rawModel = modelBuildingResult.getRawModel();
final Model effectiveModel = modelBuildingResult.getEffectiveModel();
this.groupId = effectiveModel.getGroupId();
this.artifactId = effectiveModel.getArtifactId();
this.version = effectiveModel.getVersion();
this.dir = effectiveModel.getProjectDirectory().toPath();
this.modelBuildingResult = modelBuildingResult;
this.workspace = workspace;
if (workspace != null) {
workspace.addProject(this, rawModel.getPomFile().lastModified());
}
}
LocalProject(Model rawModel, LocalWorkspace workspace) throws BootstrapMavenException {
this.modelBuildingResult = null;
this.rawModel = rawModel;
this.dir = rawModel.getProjectDirectory().toPath();
this.workspace = workspace;
this.groupId = ModelUtils.getGroupId(rawModel);
this.artifactId = rawModel.getArtifactId();
final String rawVersion = ModelUtils.getRawVersion(rawModel);
final boolean rawVersionIsUnresolved = ModelUtils.isUnresolvedVersion(rawVersion);
version = rawVersionIsUnresolved ? ModelUtils.resolveVersion(rawVersion, rawModel) : rawVersion;
if (workspace != null) {
workspace.addProject(this, rawModel.getPomFile().lastModified());
if (rawVersionIsUnresolved && version != null) {
workspace.setResolvedVersion(version);
}
} else if (version == null && rawVersionIsUnresolved) {
throw UnresolvedVersionException.forGa(groupId, artifactId, rawVersion);
}
}
public LocalProject getLocalParent() {
if (workspace == null) {
return null;
}
final Parent parent = rawModel.getParent();
if (parent == null) {
return null;
}
return workspace.getProject(parent.getGroupId(), parent.getArtifactId());
}
public String getGroupId() {
return groupId;
}
public String getArtifactId() {
return artifactId;
}
public String getVersion() {
if (version != null) {
return version;
}
if (workspace != null) {
version = workspace.getResolvedVersion();
}
if (version == null) {
throw UnresolvedVersionException.forGa(groupId, artifactId, ModelUtils.getRawVersion(rawModel));
}
return version;
}
public Path getDir() {
return dir;
}
public Path getOutputDir() {
return modelBuildingResult == null
? resolveRelativeToBaseDir(configuredBuildDir(this, build -> build.getDirectory()), "target")
: Paths.get(modelBuildingResult.getEffectiveModel().getBuild().getDirectory());
}
public Path getCodeGenOutputDir() {
return getOutputDir().resolve("generated-sources");
}
public Path getClassesDir() {
return modelBuildingResult == null
? resolveRelativeToBuildDir(configuredBuildDir(this, build -> build.getOutputDirectory()), "classes")
: Paths.get(modelBuildingResult.getEffectiveModel().getBuild().getOutputDirectory());
}
public Path getTestClassesDir() {
return modelBuildingResult == null
? resolveRelativeToBuildDir(configuredBuildDir(this, build -> build.getTestOutputDirectory()), "test-classes")
: Paths.get(modelBuildingResult.getEffectiveModel().getBuild().getTestOutputDirectory());
}
public Path getSourcesSourcesDir() {
return modelBuildingResult == null
? resolveRelativeToBaseDir(configuredBuildDir(this, build -> build.getSourceDirectory()), "src/main/java")
: Paths.get(modelBuildingResult.getEffectiveModel().getBuild().getSourceDirectory());
}
public Path getTestSourcesSourcesDir() {
return resolveRelativeToBaseDir(configuredBuildDir(this, build -> build.getTestSourceDirectory()), "src/test/java");
}
public Path getSourcesDir() {
return getSourcesSourcesDir().getParent();
}
public PathCollection getResourcesSourcesDirs() {
final List<Resource> resources = rawModel.getBuild() == null ? Collections.emptyList()
: rawModel.getBuild().getResources();
if (resources.isEmpty()) {
return PathList.of(resolveRelativeToBaseDir(null, "src/main/resources"));
}
return PathList.from(resources.stream()
.map(Resource::getDirectory)
.map(resourcesDir -> resolveRelativeToBaseDir(resourcesDir, "src/main/resources"))
.collect(Collectors.toCollection(LinkedHashSet::new)));
}
public PathCollection getTestResourcesSourcesDirs() {
final List<Resource> resources = rawModel.getBuild() == null ? Collections.emptyList()
: rawModel.getBuild().getTestResources();
if (resources.isEmpty()) {
return PathList.of(resolveRelativeToBaseDir(null, "src/test/resources"));
}
return PathList.from(resources.stream()
.map(Resource::getDirectory)
.map(resourcesDir -> resolveRelativeToBaseDir(resourcesDir, "src/test/resources"))
.collect(Collectors.toCollection(LinkedHashSet::new)));
}
public ModelBuildingResult getModelBuildingResult() {
return modelBuildingResult;
}
public Model getRawModel() {
return rawModel;
}
public LocalWorkspace getWorkspace() {
return workspace;
}
public AppArtifactKey getKey() {
return key == null ? key = new AppArtifactKey(groupId, artifactId) : key;
}
public AppArtifact getAppArtifact() {
return getAppArtifact(
modelBuildingResult == null ? rawModel.getPackaging() : modelBuildingResult.getEffectiveModel().getPackaging());
}
public AppArtifact getAppArtifact(String extension) {
return new AppArtifact(groupId, artifactId, "", extension, getVersion());
}
public Path resolveRelativeToBaseDir(String path) {
return resolveRelativeToBaseDir(path, null);
}
private Path resolveRelativeToBaseDir(String path, String defaultPath) {
return dir.resolve(path == null ? defaultPath : stripProjectBasedirPrefix(path, PROJECT_BASEDIR));
}
private Path resolveRelativeToBuildDir(String path, String defaultPath) {
return getOutputDir().resolve(path == null ? defaultPath : stripProjectBasedirPrefix(path, PROJECT_BUILD_DIR));
}
private static String stripProjectBasedirPrefix(String path, String expr) {
return path.startsWith(expr) ? path.substring(expr.length() + 1) : path;
}
private static String configuredBuildDir(LocalProject project, Function<Build, String> f) {
String dir = project.rawModel.getBuild() == null ? null : f.apply(project.rawModel.getBuild());
while (dir == null) {
project = project.getLocalParent();
if (project == null) {
break;
}
if (project.rawModel.getBuild() != null) {
dir = f.apply(project.rawModel.getBuild());
}
}
return dir;
}
public WorkspaceModule toWorkspaceModule() {
if (module != null) {
return module;
}
final DefaultWorkspaceModule module = new DefaultWorkspaceModule(
new GAV(getKey().getGroupId(), getKey().getArtifactId(), getVersion()), dir.toFile(), getOutputDir().toFile());
final Build build = (modelBuildingResult == null ? getRawModel() : modelBuildingResult.getEffectiveModel()).getBuild();
if (build != null && !build.getPlugins().isEmpty()) {
for (Plugin plugin : build.getPlugins()) {
if (!plugin.getArtifactId().equals("maven-jar-plugin")) {
continue;
}
for (PluginExecution e : plugin.getExecutions()) {
DefaultArtifactSources src = null;
if (e.getGoals().contains(GACTV.TYPE_JAR)) {
src = processJarPluginExecutionConfig(e, false);
} else if (e.getGoals().contains("test-jar")) {
src = processJarPluginExecutionConfig(e, true);
}
if (src != null) {
module.addArtifactSources(src);
}
}
}
}
if (module.getMainSources() == null) {
final DefaultArtifactSources src = new DefaultArtifactSources(DefaultWorkspaceModule.MAIN);
src.addSources(new DefaultSourceDir(getSourcesSourcesDir().toFile(), getClassesDir().toFile()));
addMainResources(src, null);
module.addArtifactSources(src);
}
if (module.getTestSources() == null) {
final DefaultArtifactSources src = new DefaultArtifactSources(DefaultWorkspaceModule.TEST);
src.addSources(new DefaultSourceDir(getTestSourcesSourcesDir().toFile(), getTestClassesDir().toFile()));
addTestResources(src, null);
module.addArtifactSources(src);
}
module.setBuildFiles(PathList.of(getRawModel().getPomFile().toPath()));
return this.module = module;
}
private static String getClassifier(Xpp3Dom dom, boolean test) {
final Xpp3Dom classifier = dom.getChild("classifier");
return classifier == null ? (test ? DefaultWorkspaceModule.TEST : DefaultWorkspaceModule.MAIN) : classifier.getValue();
}
private void addMainResources(DefaultArtifactSources module, PathFilter filter) {
final List<Resource> resources = rawModel.getBuild() == null ? Collections.emptyList()
: rawModel.getBuild().getResources();
if (resources.isEmpty()) {
module.addResources(new DefaultSourceDir(
new DirectoryPathTree(resolveRelativeToBaseDir(null, "src/main/resources")),
new DirectoryPathTree(getClassesDir(), filter), Collections.emptyMap()));
} else {
for (Resource r : resources) {
module.addResources(
new DefaultSourceDir(
new DirectoryPathTree(resolveRelativeToBaseDir(r.getDirectory(), "src/main/resources")),
new DirectoryPathTree((r.getTargetPath() == null ? getClassesDir()
: getClassesDir()
.resolve(stripProjectBasedirPrefix(r.getTargetPath(), PROJECT_OUTPUT_DIR))),
filter),
Collections.emptyMap()));
}
}
}
private void addTestResources(DefaultArtifactSources module, PathFilter filter) {
final List<Resource> resources = rawModel.getBuild() == null ? Collections.emptyList()
: rawModel.getBuild().getTestResources();
if (resources.isEmpty()) {
module.addResources(new DefaultSourceDir(
new DirectoryPathTree(resolveRelativeToBaseDir(null, "src/test/resources")),
new DirectoryPathTree(getTestClassesDir(), filter), Collections.emptyMap()));
} else {
for (Resource r : resources) {
module.addResources(
new DefaultSourceDir(
new DirectoryPathTree(resolveRelativeToBaseDir(r.getDirectory(), "src/test/resources")),
new DirectoryPathTree((r.getTargetPath() == null ? getTestClassesDir()
: getTestClassesDir()
.resolve(stripProjectBasedirPrefix(r.getTargetPath(), PROJECT_OUTPUT_DIR))),
filter),
Collections.emptyMap()));
}
}
}
} | class LocalProject {
public static final String PROJECT_GROUPID = "${project.groupId}";
private static final String PROJECT_BASEDIR = "${project.basedir}";
private static final String PROJECT_BUILD_DIR = "${project.build.directory}";
private static final String PROJECT_OUTPUT_DIR = "${project.build.outputDirectory}";
public static final String POM_XML = "pom.xml";
public static LocalProject load(Path path) throws BootstrapMavenException {
return load(path, true);
}
public static LocalProject load(Path path, boolean required) throws BootstrapMavenException {
final Path pom = locateCurrentProjectPom(path, required);
if (pom == null) {
return null;
}
try {
return new LocalProject(readModel(pom), null);
} catch (UnresolvedVersionException e) {
return loadWorkspace(pom);
}
}
public static LocalProject loadWorkspace(Path path) throws BootstrapMavenException {
return loadWorkspace(path, true);
}
public static LocalProject loadWorkspace(Path path, boolean required) throws BootstrapMavenException {
try {
return new WorkspaceLoader(null, path.normalize().toAbsolutePath()).load();
} catch (Exception e) {
if (required) {
throw e;
}
return null;
}
}
/**
* Loads the workspace the current project belongs to.
* If current project does not exist then the method will return null.
*
* @param ctx bootstrap maven context
* @return current project with the workspace or null in case the current project could not be resolved
* @throws BootstrapMavenException in case of an error
*/
public static LocalProject loadWorkspace(BootstrapMavenContext ctx) throws BootstrapMavenException {
final Path currentProjectPom = ctx.getCurrentProjectPomOrNull();
if (currentProjectPom == null) {
return null;
}
final Path rootProjectBaseDir = ctx.getRootProjectBaseDir();
final WorkspaceLoader wsLoader = new WorkspaceLoader(ctx, currentProjectPom);
if (rootProjectBaseDir != null && !rootProjectBaseDir.equals(currentProjectPom.getParent())) {
wsLoader.setWorkspaceRootPom(rootProjectBaseDir.resolve(POM_XML));
}
return wsLoader.load();
}
static final Model readModel(Path pom) throws BootstrapMavenException {
try {
final Model model = ModelUtils.readModel(pom);
model.setPomFile(pom.toFile());
return model;
} catch (IOException e) {
throw new BootstrapMavenException("Failed to read " + pom, e);
}
}
static Path locateCurrentProjectPom(Path path, boolean required) throws BootstrapMavenException {
Path p = path;
while (p != null) {
final Path pom = p.resolve(POM_XML);
if (Files.exists(pom)) {
return pom;
}
p = p.getParent();
}
if (required) {
throw new BootstrapMavenException("Failed to locate project pom.xml for " + path);
}
return null;
}
private final Model rawModel;
private final String groupId;
private final String artifactId;
private String version;
private final Path dir;
private final LocalWorkspace workspace;
final List<LocalProject> modules = new ArrayList<>(0);
private AppArtifactKey key;
private final ModelBuildingResult modelBuildingResult;
private WorkspaceModule module;
LocalProject(ModelBuildingResult modelBuildingResult, LocalWorkspace workspace) {
this.rawModel = modelBuildingResult.getRawModel();
final Model effectiveModel = modelBuildingResult.getEffectiveModel();
this.groupId = effectiveModel.getGroupId();
this.artifactId = effectiveModel.getArtifactId();
this.version = effectiveModel.getVersion();
this.dir = effectiveModel.getProjectDirectory().toPath();
this.modelBuildingResult = modelBuildingResult;
this.workspace = workspace;
if (workspace != null) {
workspace.addProject(this, rawModel.getPomFile().lastModified());
}
}
LocalProject(Model rawModel, LocalWorkspace workspace) throws BootstrapMavenException {
this.modelBuildingResult = null;
this.rawModel = rawModel;
this.dir = rawModel.getProjectDirectory().toPath();
this.workspace = workspace;
this.groupId = ModelUtils.getGroupId(rawModel);
this.artifactId = rawModel.getArtifactId();
final String rawVersion = ModelUtils.getRawVersion(rawModel);
final boolean rawVersionIsUnresolved = ModelUtils.isUnresolvedVersion(rawVersion);
version = rawVersionIsUnresolved ? ModelUtils.resolveVersion(rawVersion, rawModel) : rawVersion;
if (workspace != null) {
workspace.addProject(this, rawModel.getPomFile().lastModified());
if (rawVersionIsUnresolved && version != null) {
workspace.setResolvedVersion(version);
}
} else if (version == null && rawVersionIsUnresolved) {
throw UnresolvedVersionException.forGa(groupId, artifactId, rawVersion);
}
}
public LocalProject getLocalParent() {
if (workspace == null) {
return null;
}
final Parent parent = rawModel.getParent();
if (parent == null) {
return null;
}
return workspace.getProject(parent.getGroupId(), parent.getArtifactId());
}
public String getGroupId() {
return groupId;
}
public String getArtifactId() {
return artifactId;
}
public String getVersion() {
if (version != null) {
return version;
}
if (workspace != null) {
version = workspace.getResolvedVersion();
}
if (version == null) {
throw UnresolvedVersionException.forGa(groupId, artifactId, ModelUtils.getRawVersion(rawModel));
}
return version;
}
public Path getDir() {
return dir;
}
public Path getOutputDir() {
return modelBuildingResult == null
? resolveRelativeToBaseDir(configuredBuildDir(this, build -> build.getDirectory()), "target")
: Paths.get(modelBuildingResult.getEffectiveModel().getBuild().getDirectory());
}
public Path getCodeGenOutputDir() {
return getOutputDir().resolve("generated-sources");
}
public Path getClassesDir() {
return modelBuildingResult == null
? resolveRelativeToBuildDir(configuredBuildDir(this, build -> build.getOutputDirectory()), "classes")
: Paths.get(modelBuildingResult.getEffectiveModel().getBuild().getOutputDirectory());
}
public Path getTestClassesDir() {
return modelBuildingResult == null
? resolveRelativeToBuildDir(configuredBuildDir(this, build -> build.getTestOutputDirectory()), "test-classes")
: Paths.get(modelBuildingResult.getEffectiveModel().getBuild().getTestOutputDirectory());
}
public Path getSourcesSourcesDir() {
return modelBuildingResult == null
? resolveRelativeToBaseDir(configuredBuildDir(this, build -> build.getSourceDirectory()), "src/main/java")
: Paths.get(modelBuildingResult.getEffectiveModel().getBuild().getSourceDirectory());
}
public Path getTestSourcesSourcesDir() {
return resolveRelativeToBaseDir(configuredBuildDir(this, build -> build.getTestSourceDirectory()), "src/test/java");
}
public Path getSourcesDir() {
return getSourcesSourcesDir().getParent();
}
public PathCollection getResourcesSourcesDirs() {
final List<Resource> resources = rawModel.getBuild() == null ? Collections.emptyList()
: rawModel.getBuild().getResources();
if (resources.isEmpty()) {
return PathList.of(resolveRelativeToBaseDir(null, "src/main/resources"));
}
return PathList.from(resources.stream()
.map(Resource::getDirectory)
.map(resourcesDir -> resolveRelativeToBaseDir(resourcesDir, "src/main/resources"))
.collect(Collectors.toCollection(LinkedHashSet::new)));
}
public PathCollection getTestResourcesSourcesDirs() {
final List<Resource> resources = rawModel.getBuild() == null ? Collections.emptyList()
: rawModel.getBuild().getTestResources();
if (resources.isEmpty()) {
return PathList.of(resolveRelativeToBaseDir(null, "src/test/resources"));
}
return PathList.from(resources.stream()
.map(Resource::getDirectory)
.map(resourcesDir -> resolveRelativeToBaseDir(resourcesDir, "src/test/resources"))
.collect(Collectors.toCollection(LinkedHashSet::new)));
}
public ModelBuildingResult getModelBuildingResult() {
return modelBuildingResult;
}
public Model getRawModel() {
return rawModel;
}
public LocalWorkspace getWorkspace() {
return workspace;
}
public AppArtifactKey getKey() {
return key == null ? key = new AppArtifactKey(groupId, artifactId) : key;
}
public AppArtifact getAppArtifact() {
return getAppArtifact(
modelBuildingResult == null ? rawModel.getPackaging() : modelBuildingResult.getEffectiveModel().getPackaging());
}
public AppArtifact getAppArtifact(String extension) {
return new AppArtifact(groupId, artifactId, "", extension, getVersion());
}
public Path resolveRelativeToBaseDir(String path) {
return resolveRelativeToBaseDir(path, null);
}
private Path resolveRelativeToBaseDir(String path, String defaultPath) {
return dir.resolve(path == null ? defaultPath : stripProjectBasedirPrefix(path, PROJECT_BASEDIR));
}
private Path resolveRelativeToBuildDir(String path, String defaultPath) {
return getOutputDir().resolve(path == null ? defaultPath : stripProjectBasedirPrefix(path, PROJECT_BUILD_DIR));
}
private static String stripProjectBasedirPrefix(String path, String expr) {
return path.startsWith(expr) ? path.substring(expr.length() + 1) : path;
}
private static String configuredBuildDir(LocalProject project, Function<Build, String> f) {
String dir = project.rawModel.getBuild() == null ? null : f.apply(project.rawModel.getBuild());
while (dir == null) {
project = project.getLocalParent();
if (project == null) {
break;
}
if (project.rawModel.getBuild() != null) {
dir = f.apply(project.rawModel.getBuild());
}
}
return dir;
}
public WorkspaceModule toWorkspaceModule() {
if (module != null) {
return module;
}
final DefaultWorkspaceModule module = new DefaultWorkspaceModule(
new GAV(getKey().getGroupId(), getKey().getArtifactId(), getVersion()), dir.toFile(), getOutputDir().toFile());
final Build build = (modelBuildingResult == null ? getRawModel() : modelBuildingResult.getEffectiveModel()).getBuild();
if (build != null && !build.getPlugins().isEmpty()) {
for (Plugin plugin : build.getPlugins()) {
if (!plugin.getArtifactId().equals("maven-jar-plugin")) {
continue;
}
for (PluginExecution e : plugin.getExecutions()) {
DefaultArtifactSources src = null;
if (e.getGoals().contains(ArtifactCoords.TYPE_JAR)) {
src = processJarPluginExecutionConfig(e, false);
} else if (e.getGoals().contains("test-jar")) {
src = processJarPluginExecutionConfig(e, true);
}
if (src != null) {
module.addArtifactSources(src);
}
}
}
}
if (module.getMainSources() == null) {
module.addArtifactSources(new DefaultArtifactSources(DefaultWorkspaceModule.MAIN,
Collections.singletonList(new DefaultSourceDir(getSourcesSourcesDir().toFile(), getClassesDir().toFile())),
collectMainResources(null)));
}
if (module.getTestSources() == null) {
module.addArtifactSources(new DefaultArtifactSources(DefaultWorkspaceModule.TEST,
Collections.singletonList(
new DefaultSourceDir(getTestSourcesSourcesDir().toFile(), getTestClassesDir().toFile())),
collectTestResources(null)));
}
module.setBuildFiles(PathList.of(getRawModel().getPomFile().toPath()));
return this.module = module;
}
private List<String> collectChildValues(final Xpp3Dom container) {
if (container == null) {
return null;
}
final Xpp3Dom[] excludeElements = container.getChildren();
final List<String> list = new ArrayList<>(excludeElements.length);
for (Xpp3Dom child : container.getChildren()) {
list.add(child.getValue());
}
return list;
}
private static String getClassifier(Xpp3Dom dom, boolean test) {
final Xpp3Dom classifier = dom.getChild("classifier");
return classifier == null ? (test ? DefaultWorkspaceModule.TEST : DefaultWorkspaceModule.MAIN) : classifier.getValue();
}
private Collection<SourceDir> collectMainResources(PathFilter filter) {
final List<Resource> resources = rawModel.getBuild() == null ? Collections.emptyList()
: rawModel.getBuild().getResources();
if (resources.isEmpty()) {
return Collections.singletonList(new DefaultSourceDir(
new DirectoryPathTree(resolveRelativeToBaseDir(null, "src/main/resources")),
new DirectoryPathTree(getClassesDir(), filter), Collections.emptyMap()));
}
final List<SourceDir> sourceDirs = new ArrayList<>(resources.size());
for (Resource r : resources) {
sourceDirs.add(
new DefaultSourceDir(
new DirectoryPathTree(resolveRelativeToBaseDir(r.getDirectory(), "src/main/resources")),
new DirectoryPathTree((r.getTargetPath() == null ? getClassesDir()
: getClassesDir()
.resolve(stripProjectBasedirPrefix(r.getTargetPath(), PROJECT_OUTPUT_DIR))),
filter),
Collections.emptyMap()));
}
return sourceDirs;
}
private Collection<SourceDir> collectTestResources(PathFilter filter) {
final List<Resource> resources = rawModel.getBuild() == null ? Collections.emptyList()
: rawModel.getBuild().getTestResources();
if (resources.isEmpty()) {
return Collections.singletonList(new DefaultSourceDir(
new DirectoryPathTree(resolveRelativeToBaseDir(null, "src/test/resources")),
new DirectoryPathTree(getTestClassesDir(), filter), Collections.emptyMap()));
}
final List<SourceDir> sourceDirs = new ArrayList<>(resources.size());
for (Resource r : resources) {
sourceDirs.add(
new DefaultSourceDir(
new DirectoryPathTree(resolveRelativeToBaseDir(r.getDirectory(), "src/test/resources")),
new DirectoryPathTree((r.getTargetPath() == null ? getTestClassesDir()
: getTestClassesDir()
.resolve(stripProjectBasedirPrefix(r.getTargetPath(), PROJECT_OUTPUT_DIR))),
filter),
Collections.emptyMap()));
}
return sourceDirs;
}
} |
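One possible shape for the tests requested in the comment above is to feed a maven-jar-plugin includes/excludes configuration as XML and assert on the values that reach PathFilter. This is only a sketch under assumptions: the XML snippet, the class name, and the use of plexus-utils Xpp3DomBuilder are illustrative, and a real test would use the project's own test harness and assertions:
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.codehaus.plexus.util.xml.Xpp3DomBuilder;

class JarPluginConfigSketch {
    public static void main(String[] args) throws Exception {
        // Invented configuration resembling a maven-jar-plugin execution block
        String xml = "<configuration><includes><include>**/api/**</include><include>**/spi/**</include></includes></configuration>";
        Xpp3Dom dom = Xpp3DomBuilder.build(new StringReader(xml));
        List<String> includes = new ArrayList<>();
        for (Xpp3Dom child : dom.getChild("includes").getChildren()) {
            includes.add(child.getValue());
        }
        // Both include patterns should be collected, as collectChildValues does in the class above
        System.out.println(includes);
    }
}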
Please improve the error message to specify that withSnowPipe() is required for streaming / unbounded PCollections. | private void checkArguments(PCollection<T> input) {
checkArgument(getStagingBucketName() != null, "withStagingBucketName is required");
checkArgument(getUserDataMapper() != null, "withUserDataMapper() is required");
checkArgument(
(getDataSourceProviderFn() != null),
"withDataSourceConfiguration() or withDataSourceProviderFn() is required");
if (input.isBounded() == PCollection.IsBounded.UNBOUNDED) {
checkArgument(getSnowPipe() != null, "withSnowPipe() is required");
} else {
checkArgument(getTable() != null, "to() is required");
}
} | checkArgument(getSnowPipe() != null, "withSnowPipe() is required"); | private void checkArguments(PCollection<T> input) {
checkArgument(getStagingBucketName() != null, "withStagingBucketName is required");
checkArgument(getUserDataMapper() != null, "withUserDataMapper() is required");
checkArgument(
(getDataSourceProviderFn() != null),
"withDataSourceConfiguration() or withDataSourceProviderFn() is required");
if (input.isBounded() == PCollection.IsBounded.UNBOUNDED) {
checkArgument(
getSnowPipe() != null,
"in streaming (unbounded) write it is required to specify SnowPipe name via withSnowPipe() method.");
} else {
checkArgument(
getTable() != null,
"in batch writing it is required to specify destination table name via to() method.");
}
} | class Builder<T> {
abstract Builder<T> setDataSourceProviderFn(
SerializableFunction<Void, DataSource> dataSourceProviderFn);
abstract Builder<T> setTable(String table);
abstract Builder<T> setStorageIntegrationName(String storageIntegrationName);
abstract Builder<T> setStagingBucketName(String stagingBucketName);
abstract Builder<T> setQuery(String query);
abstract Builder<T> setSnowPipe(ValueProvider<String> snowPipe);
abstract Builder<T> setFlushRowLimit(Integer rowsCount);
abstract Builder<T> setShardsNumber(Integer shardsNumber);
abstract Builder<T> setFlushTimeLimit(Duration triggeringFrequency);
abstract Builder<T> setFileNameTemplate(String fileNameTemplate);
abstract Builder<T> setUserDataMapper(UserDataMapper userDataMapper);
abstract Builder<T> setWriteDisposition(WriteDisposition writeDisposition);
abstract Builder<T> setSnowflakeService(SnowflakeService snowflakeService);
abstract Builder<T> setQuotationMark(String quotationMark);
abstract Builder<T> setDebugMode(StreamingLogLevel debugLevel);
abstract Write<T> build();
} | class Builder<T> {
abstract Builder<T> setDataSourceProviderFn(
SerializableFunction<Void, DataSource> dataSourceProviderFn);
abstract Builder<T> setTable(String table);
abstract Builder<T> setStorageIntegrationName(String storageIntegrationName);
abstract Builder<T> setStagingBucketName(String stagingBucketName);
abstract Builder<T> setQuery(String query);
abstract Builder<T> setSnowPipe(ValueProvider<String> snowPipe);
abstract Builder<T> setFlushRowLimit(Integer rowsCount);
abstract Builder<T> setShardsNumber(Integer shardsNumber);
abstract Builder<T> setFlushTimeLimit(Duration triggeringFrequency);
abstract Builder<T> setFileNameTemplate(String fileNameTemplate);
abstract Builder<T> setUserDataMapper(UserDataMapper userDataMapper);
abstract Builder<T> setWriteDisposition(WriteDisposition writeDisposition);
abstract Builder<T> setCreateDisposition(CreateDisposition createDisposition);
abstract Builder<T> setTableSchema(SnowflakeTableSchema tableSchema);
abstract Builder<T> setSnowflakeService(SnowflakeService snowflakeService);
abstract Builder<T> setQuotationMark(String quotationMark);
abstract Builder<T> setDebugMode(StreamingLogLevel debugLevel);
abstract Write<T> build();
} |
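To see what the clearer wording buys at runtime, a small standalone sketch of a precondition with a descriptive message; the scenario, variable names, and the plain-Guava import are assumptions for illustration:
import static com.google.common.base.Preconditions.checkArgument;

class CheckArgumentDemo {
    public static void main(String[] args) {
        boolean unbounded = true; // stands in for input.isBounded() == UNBOUNDED
        Object snowPipe = null;   // the caller forgot withSnowPipe()
        try {
            if (unbounded) {
                checkArgument(snowPipe != null,
                        "in streaming (unbounded) write it is required to specify SnowPipe name via withSnowPipe() method.");
            }
        } catch (IllegalArgumentException e) {
            // The pipeline fails fast with an actionable message instead of a bare precondition failure
            System.out.println(e.getMessage());
        }
    }
}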
Is `targetDatabaseName = targetDatabase.getName();` necessary? Migration is compatible with both upper- and lower-case database names. | public void executeUpdate(final MigrateTableStatement sqlStatement, final ContextManager contextManager) {
InstanceContext instanceContext = contextManager.getInstanceContext();
ShardingSpherePreconditions.checkState(instanceContext.isCluster(),
() -> new PipelineInvalidParameterException(String.format("Only `Cluster` is supported now, but current mode type is `%s`", instanceContext.getModeConfiguration().getType())));
checkTargetDatabase(sqlStatement);
String targetDatabaseName;
if (Strings.isNullOrEmpty(sqlStatement.getTargetDatabaseName())) {
targetDatabaseName = database.getName();
} else {
ShardingSphereDatabase targetDatabase = PipelineContextManager.getProxyContext().getContextManager().getDatabase(sqlStatement.getTargetDatabaseName());
ShardingSpherePreconditions.checkNotNull(targetDatabase, () -> new PipelineInvalidParameterException(String.format("Target database `%s` is not exists",
sqlStatement.getTargetDatabaseName())));
targetDatabaseName = targetDatabase.getName();
}
MigrationJobAPI jobAPI = (MigrationJobAPI) TypedSPILoader.getService(TransmissionJobAPI.class, "MIGRATION");
jobAPI.start(new PipelineContextKey(InstanceType.PROXY), new MigrateTableStatement(sqlStatement.getSourceTargetEntries(), targetDatabaseName));
} | targetDatabaseName = targetDatabase.getName(); | public void executeUpdate(final MigrateTableStatement sqlStatement, final ContextManager contextManager) {
InstanceContext instanceContext = contextManager.getInstanceContext();
ShardingSpherePreconditions.checkState(instanceContext.isCluster(),
() -> new PipelineInvalidParameterException(String.format("Only `Cluster` is supported now, but current mode type is `%s`", instanceContext.getModeConfiguration().getType())));
String targetDatabaseName = null == sqlStatement.getTargetDatabaseName() ? database.getName() : sqlStatement.getTargetDatabaseName();
ShardingSpherePreconditions.checkState(contextManager.getMetaDataContexts().getMetaData().containsDatabase(targetDatabaseName),
() -> new MissingRequiredTargetDatabaseException(sqlStatement.getTargetDatabaseName()));
MigrationJobAPI jobAPI = (MigrationJobAPI) TypedSPILoader.getService(TransmissionJobAPI.class, "MIGRATION");
jobAPI.start(new PipelineContextKey(InstanceType.PROXY), new MigrateTableStatement(sqlStatement.getSourceTargetEntries(), targetDatabaseName));
} | class MigrateTableExecutor implements DistSQLUpdateExecutor<MigrateTableStatement>, DistSQLExecutorDatabaseAware {
private ShardingSphereDatabase database;
private void checkTargetDatabase(final MigrateTableStatement sqlStatement) {
String targetDatabaseName = null == sqlStatement.getTargetDatabaseName() ? database.getName() : sqlStatement.getTargetDatabaseName();
ShardingSpherePreconditions.checkNotNull(targetDatabaseName, MissingRequiredTargetDatabaseException::new);
}
@Override
public Class<MigrateTableStatement> getType() {
return MigrateTableStatement.class;
}
} | class MigrateTableExecutor implements DistSQLUpdateExecutor<MigrateTableStatement>, DistSQLExecutorDatabaseAware {
private ShardingSphereDatabase database;
@Override
public Class<MigrateTableStatement> getType() {
return MigrateTableStatement.class;
}
} |
Because the executable functions' signatures all use varchar rather than string. | private Expression translateJavaFormatter(Expression formatterExpr) {
if (formatterExpr.isLiteral() && formatterExpr.getDataType().isStringLikeType()) {
Literal literal = (Literal) formatterExpr;
String originFormatter = literal.getStringValue();
if (originFormatter.equals("yyyyMMdd")) {
return new VarcharLiteral("%Y%m%d");
} else if (originFormatter.equals("yyyy-MM-dd")) {
return new VarcharLiteral("%Y-%m-%d");
} else if (originFormatter.equals("yyyy-MM-dd HH:mm:ss")) {
return new VarcharLiteral("%Y-%m-%d %H:%i:%s");
}
}
return formatterExpr;
} | return new VarcharLiteral("%Y-%m-%d %H:%i:%s"); | private Expression translateJavaFormatter(Expression formatterExpr) {
if (formatterExpr.isLiteral() && formatterExpr.getDataType().isStringLikeType()) {
Literal literal = (Literal) formatterExpr;
String originFormatter = literal.getStringValue();
if (originFormatter.equals("yyyyMMdd")) {
return new VarcharLiteral("%Y%m%d");
} else if (originFormatter.equals("yyyy-MM-dd")) {
return new VarcharLiteral("%Y-%m-%d");
} else if (originFormatter.equals("yyyy-MM-dd HH:mm:ss")) {
return new VarcharLiteral("%Y-%m-%d %H:%i:%s");
}
}
return formatterExpr;
} | class SupportJavaDateFormatter extends AbstractExpressionRewriteRule {
public static final SupportJavaDateFormatter INSTANCE = new SupportJavaDateFormatter();
@Override
public Expression visitDateFormat(DateFormat dateFormat, ExpressionRewriteContext context) {
Expression expr = super.visitDateFormat(dateFormat, context);
if (!(expr instanceof DateFormat)) {
return expr;
}
dateFormat = (DateFormat) expr;
if (dateFormat.arity() > 1) {
return translateJavaFormatter(dateFormat, 1);
}
return dateFormat;
}
@Override
public Expression visitFromUnixtime(FromUnixtime fromUnixtime, ExpressionRewriteContext context) {
Expression expr = super.visitFromUnixtime(fromUnixtime, context);
if (!(expr instanceof FromUnixtime)) {
return expr;
}
fromUnixtime = (FromUnixtime) expr;
if (fromUnixtime.arity() > 1) {
return translateJavaFormatter(fromUnixtime, 1);
}
return fromUnixtime;
}
private Expression translateJavaFormatter(Expression function, int formatterIndex) {
Expression formatterExpr = function.getArgument(formatterIndex);
Expression newFormatterExpr = translateJavaFormatter(formatterExpr);
if (newFormatterExpr != formatterExpr) {
List<Expression> newArguments = Lists.newArrayList(function.getArguments());
newArguments.set(formatterIndex, newFormatterExpr);
return function.withChildren(newArguments);
}
return function;
}
} | class SupportJavaDateFormatter extends AbstractExpressionRewriteRule {
public static final SupportJavaDateFormatter INSTANCE = new SupportJavaDateFormatter();
@Override
public Expression visitDateFormat(DateFormat dateFormat, ExpressionRewriteContext context) {
Expression expr = super.visitDateFormat(dateFormat, context);
if (!(expr instanceof DateFormat)) {
return expr;
}
dateFormat = (DateFormat) expr;
if (dateFormat.arity() > 1) {
return translateJavaFormatter(dateFormat, 1);
}
return dateFormat;
}
@Override
public Expression visitFromUnixtime(FromUnixtime fromUnixtime, ExpressionRewriteContext context) {
Expression expr = super.visitFromUnixtime(fromUnixtime, context);
if (!(expr instanceof FromUnixtime)) {
return expr;
}
fromUnixtime = (FromUnixtime) expr;
if (fromUnixtime.arity() > 1) {
return translateJavaFormatter(fromUnixtime, 1);
}
return fromUnixtime;
}
private Expression translateJavaFormatter(Expression function, int formatterIndex) {
Expression formatterExpr = function.getArgument(formatterIndex);
Expression newFormatterExpr = translateJavaFormatter(formatterExpr);
if (newFormatterExpr != formatterExpr) {
List<Expression> newArguments = Lists.newArrayList(function.getArguments());
newArguments.set(formatterIndex, newFormatterExpr);
return function.withChildren(newArguments);
}
return function;
}
} |
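For the pattern mapping itself, a short standalone sketch of why "yyyyMMdd" and "yyyy-MM-dd" correspond to the MySQL-style "%Y%m%d" and "%Y-%m-%d"; the sample date and class name are invented:
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;

class FormatterMappingDemo {
    public static void main(String[] args) {
        LocalDate d = LocalDate.of(2024, 3, 5);
        // yyyyMMdd   -> 20240305   (what %Y%m%d describes)
        // yyyy-MM-dd -> 2024-03-05 (what %Y-%m-%d describes)
        System.out.println(d.format(DateTimeFormatter.ofPattern("yyyyMMdd")));
        System.out.println(d.format(DateTimeFormatter.ofPattern("yyyy-MM-dd")));
    }
}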
This will also include application-level endpoints now (if declared), but we haven't modeled that on the config server side yet. | private void createEndpointList(DeployState deployState) {
if( deployState.getProperties().applicationId().instance().isTester()) return;
List<ApplicationClusterEndpoint> endpoints = new ArrayList<>();
List<String> hosts = getContainers().stream()
.map(AbstractService::getHostName)
.collect(Collectors.toList());
for(String suffix : deployState.getProperties().zoneDnsSuffixes()) {
ApplicationClusterEndpoint.DnsName l4Name = ApplicationClusterEndpoint.DnsName.sharedL4NameFrom(
ClusterSpec.Id.from(getName()),
deployState.getProperties().applicationId(),
suffix);
endpoints.add(ApplicationClusterEndpoint.builder()
.zoneScope()
.sharedL4Routing()
.dnsName(l4Name)
.hosts(hosts)
.build());
ApplicationClusterEndpoint.DnsName l7Name = ApplicationClusterEndpoint.DnsName.sharedNameFrom(
ClusterSpec.Id.from(getName()),
deployState.getProperties().applicationId(),
suffix);
endpoints.add(ApplicationClusterEndpoint.builder()
.zoneScope()
.sharedRouting()
.dnsName(l7Name)
.hosts(hosts)
.build());
}
Set<ContainerEndpoint> endpointsFromController = deployState.getEndpoints();
endpointsFromController.stream()
.filter(ce -> ce.clusterId().equals(getName()))
.forEach(ce -> ce.names().forEach(
name -> endpoints.add(ApplicationClusterEndpoint.builder()
.globalScope()
.sharedL4Routing()
.dnsName(ApplicationClusterEndpoint.DnsName.from(name))
.hosts(hosts)
.build())
));
endpointList = List.copyOf(endpoints);
} | Set<ContainerEndpoint> endpointsFromController = deployState.getEndpoints(); | private void createEndpointList(DeployState deployState) {
if(!deployState.isHosted()) return;
if(deployState.getProperties().applicationId().instance().isTester()) return;
List<ApplicationClusterEndpoint> endpoints = new ArrayList<>();
List<String> hosts = getContainers().stream()
.map(AbstractService::getHostName)
.collect(Collectors.toList());
for(String suffix : deployState.getProperties().zoneDnsSuffixes()) {
ApplicationClusterEndpoint.DnsName l4Name = ApplicationClusterEndpoint.DnsName.sharedL4NameFrom(
ClusterSpec.Id.from(getName()),
deployState.getProperties().applicationId(),
suffix);
endpoints.add(ApplicationClusterEndpoint.builder()
.zoneScope()
.sharedL4Routing()
.dnsName(l4Name)
.hosts(hosts)
.build());
ApplicationClusterEndpoint.DnsName l7Name = ApplicationClusterEndpoint.DnsName.sharedNameFrom(
ClusterSpec.Id.from(getName()),
deployState.getProperties().applicationId(),
suffix);
endpoints.add(ApplicationClusterEndpoint.builder()
.zoneScope()
.sharedRouting()
.dnsName(l7Name)
.hosts(hosts)
.build());
}
Set<ContainerEndpoint> endpointsFromController = deployState.getEndpoints();
endpointsFromController.stream()
.filter(ce -> ce.clusterId().equals(getName()))
.forEach(ce -> ce.names().forEach(
name -> endpoints.add(ApplicationClusterEndpoint.builder()
.scope(ce.scope())
.sharedL4Routing()
.dnsName(ApplicationClusterEndpoint.DnsName.from(name))
.hosts(hosts)
.build())
));
endpointList = List.copyOf(endpoints);
} | class ApplicationContainerCluster extends ContainerCluster<ApplicationContainer> implements
ApplicationBundlesConfig.Producer,
QrStartConfig.Producer,
RankProfilesConfig.Producer,
RankingConstantsConfig.Producer,
OnnxModelsConfig.Producer,
RankingExpressionsConfig.Producer,
ServletPathsConfig.Producer,
ContainerMbusConfig.Producer,
MetricsProxyApiConfig.Producer,
ZookeeperServerConfig.Producer,
ApplicationClusterInfo {
public static final String METRICS_V2_HANDLER_CLASS = MetricsV2Handler.class.getName();
public static final BindingPattern METRICS_V2_HANDLER_BINDING_1 = SystemBindingPattern.fromHttpPath(MetricsV2Handler.V2_PATH);
public static final BindingPattern METRICS_V2_HANDLER_BINDING_2 = SystemBindingPattern.fromHttpPath(MetricsV2Handler.V2_PATH + "/*");
public static final String PROMETHEUS_V1_HANDLER_CLASS = PrometheusV1Handler.class.getName();
private static final BindingPattern PROMETHEUS_V1_HANDLER_BINDING_1 = SystemBindingPattern.fromHttpPath(PrometheusV1Handler.V1_PATH);
private static final BindingPattern PROMETHEUS_V1_HANDLER_BINDING_2 = SystemBindingPattern.fromHttpPath(PrometheusV1Handler.V1_PATH + "/*");
public static final int heapSizePercentageOfTotalNodeMemory = 70;
public static final int heapSizePercentageOfTotalNodeMemoryWhenCombinedCluster = 18;
private final Set<FileReference> applicationBundles = new LinkedHashSet<>();
private final ConfigProducerGroup<Servlet> servletGroup;
private final Set<String> previousHosts;
private ContainerModelEvaluation modelEvaluation;
private final Optional<String> tlsClientAuthority;
private MbusParams mbusParams;
private boolean messageBusEnabled = true;
private Integer memoryPercentage = null;
private List<ApplicationClusterEndpoint> endpointList = List.of();
public ApplicationContainerCluster(AbstractConfigProducer<?> parent, String configSubId, String clusterId, DeployState deployState) {
super(parent, configSubId, clusterId, deployState, true);
this.tlsClientAuthority = deployState.tlsClientAuthority();
servletGroup = new ConfigProducerGroup<>(this, "servlet");
previousHosts = deployState.getPreviousModel().stream()
.map(Model::allocatedHosts)
.map(AllocatedHosts::getHosts)
.flatMap(Collection::stream)
.map(HostSpec::hostname)
.collect(Collectors.toUnmodifiableSet());
addSimpleComponent("com.yahoo.language.provider.DefaultLinguisticsProvider");
addSimpleComponent("com.yahoo.language.provider.DefaultEmbedderProvider");
addSimpleComponent("com.yahoo.container.jdisc.SecretStoreProvider");
addSimpleComponent("com.yahoo.container.jdisc.DeprecatedSecretStoreProvider");
addSimpleComponent("com.yahoo.container.jdisc.CertificateStoreProvider");
addSimpleComponent("com.yahoo.container.jdisc.AthenzIdentityProviderProvider");
addSimpleComponent(com.yahoo.container.core.documentapi.DocumentAccessProvider.class.getName());
addMetricsHandlers();
addTestrunnerComponentsIfTester(deployState);
}
@Override
protected void doPrepare(DeployState deployState) {
addAndSendApplicationBundles(deployState);
sendUserConfiguredFiles(deployState);
createEndpointList(deployState);
}
private void addAndSendApplicationBundles(DeployState deployState) {
for (ComponentInfo component : deployState.getApplicationPackage().getComponentsInfo(deployState.getVespaVersion())) {
FileReference reference = deployState.getFileRegistry().addFile(component.getPathRelativeToAppDir());
applicationBundles.add(reference);
}
}
private void sendUserConfiguredFiles(DeployState deployState) {
FileSender fileSender = new FileSender(containers, deployState.getFileRegistry(), deployState.getDeployLogger());
for (Component<?, ?> component : getAllComponents()) {
fileSender.sendUserConfiguredFiles(component);
}
}
private void addMetricsHandlers() {
addMetricsHandler(METRICS_V2_HANDLER_CLASS, METRICS_V2_HANDLER_BINDING_1, METRICS_V2_HANDLER_BINDING_2);
addMetricsHandler(PROMETHEUS_V1_HANDLER_CLASS, PROMETHEUS_V1_HANDLER_BINDING_1, PROMETHEUS_V1_HANDLER_BINDING_2);
}
private void addMetricsHandler(String handlerClass, BindingPattern rootBinding, BindingPattern innerBinding) {
Handler<AbstractConfigProducer<?>> handler = new Handler<>(
new ComponentModel(handlerClass, null, null, null));
handler.addServerBindings(rootBinding, innerBinding);
addComponent(handler);
}
private void addTestrunnerComponentsIfTester(DeployState deployState) {
if (deployState.isHosted() && deployState.getProperties().applicationId().instance().isTester()) {
addPlatformBundle(PlatformBundles.absoluteBundlePath("vespa-testrunner-components"));
addPlatformBundle(PlatformBundles.absoluteBundlePath("vespa-osgi-testrunner"));
addPlatformBundle(PlatformBundles.absoluteBundlePath("tenant-cd-api"));
if(deployState.zone().system().isPublic()) {
addPlatformBundle(PlatformBundles.absoluteBundlePath("cloud-tenant-cd"));
}
}
}
public void setModelEvaluation(ContainerModelEvaluation modelEvaluation) {
this.modelEvaluation = modelEvaluation;
}
public Map<ComponentId, Servlet> getServletMap() {
return servletGroup.getComponentMap();
}
public void addServlet(Servlet servlet) {
servletGroup.addComponent(servlet.getGlobalComponentId(), servlet);
}
public Collection<Servlet> getAllServlets() {
return allServlets().collect(Collectors.toCollection(ArrayList::new));
}
private Stream<Servlet> allServlets() {
return servletGroup.getComponents().stream();
}
public void setMemoryPercentage(Integer memoryPercentage) { this.memoryPercentage = memoryPercentage;
}
/**
* Returns the percentage of host physical memory this application has specified for nodes in this cluster,
* or empty if this is not specified by the application.
*/
public Optional<Integer> getMemoryPercentage() { return Optional.ofNullable(memoryPercentage); }
/*
      Create the list of endpoints; these will be consumed later by the LBservicesProducer
*/
@Override
public void getConfig(ApplicationBundlesConfig.Builder builder) {
applicationBundles.stream().map(FileReference::value)
.forEach(builder::bundles);
}
@Override
public void getConfig(ServletPathsConfig.Builder builder) {
allServlets().forEach(servlet ->
builder.servlets(servlet.getComponentId().stringValue(),
servlet.toConfigBuilder())
);
}
@Override
public void getConfig(RankProfilesConfig.Builder builder) {
if (modelEvaluation != null) modelEvaluation.getConfig(builder);
}
@Override
public void getConfig(RankingConstantsConfig.Builder builder) {
if (modelEvaluation != null) modelEvaluation.getConfig(builder);
}
@Override
public void getConfig(OnnxModelsConfig.Builder builder) {
if (modelEvaluation != null) modelEvaluation.getConfig(builder);
}
public void getConfig(RankingExpressionsConfig.Builder builder) {
if (modelEvaluation != null) modelEvaluation.getConfig(builder);
}
@Override
public void getConfig(ContainerMbusConfig.Builder builder) {
if (mbusParams != null) {
if (mbusParams.maxConcurrentFactor != null)
builder.maxConcurrentFactor(mbusParams.maxConcurrentFactor);
if (mbusParams.documentExpansionFactor != null)
builder.documentExpansionFactor(mbusParams.documentExpansionFactor);
if (mbusParams.containerCoreMemory != null)
builder.containerCoreMemory(mbusParams.containerCoreMemory);
}
if (getDocproc() != null)
getDocproc().getConfig(builder);
}
@Override
public void getConfig(MetricsProxyApiConfig.Builder builder) {
builder.metricsPort(MetricsProxyContainer.BASEPORT)
.metricsApiPath(ApplicationMetricsHandler.METRICS_VALUES_PATH)
.prometheusApiPath(ApplicationMetricsHandler.PROMETHEUS_VALUES_PATH);
}
@Override
public void getConfig(QrStartConfig.Builder builder) {
super.getConfig(builder);
builder.jvm.verbosegc(true)
.availableProcessors(0)
.compressedClassSpaceSize(0)
.minHeapsize(1536)
.heapsize(1536);
if (getMemoryPercentage().isPresent()) {
builder.jvm.heapSizeAsPercentageOfPhysicalMemory(getMemoryPercentage().get());
} else if (isHostedVespa()) {
builder.jvm.heapSizeAsPercentageOfPhysicalMemory(getHostClusterId().isPresent() ?
heapSizePercentageOfTotalNodeMemoryWhenCombinedCluster :
heapSizePercentageOfTotalNodeMemory);
}
}
@Override
public void getConfig(ZookeeperServerConfig.Builder builder) {
if (getParent() instanceof ConfigserverCluster) return;
for (Container container : getContainers()) {
ZookeeperServerConfig.Server.Builder serverBuilder = new ZookeeperServerConfig.Server.Builder();
serverBuilder.hostname(container.getHostName())
.id(container.index())
.joining(!previousHosts.isEmpty() &&
!previousHosts.contains(container.getHostName()));
builder.server(serverBuilder);
builder.dynamicReconfiguration(true);
}
}
public Optional<String> getTlsClientAuthority() {
return tlsClientAuthority;
}
public void setMbusParams(MbusParams mbusParams) {
this.mbusParams = mbusParams;
}
public void setMessageBusEnabled(boolean messageBusEnabled) { this.messageBusEnabled = messageBusEnabled; }
protected boolean messageBusEnabled() { return messageBusEnabled; }
public void addMbusServer(ComponentId chainId) {
ComponentId serviceId = chainId.nestInNamespace(ComponentId.fromString("MbusServer"));
addComponent(
new Component<>(new ComponentModel(new BundleInstantiationSpecification(
serviceId,
ComponentSpecification.fromString(MbusServerProvider.class.getName()),
null))));
}
@Override
public List<ApplicationClusterEndpoint> endpoints() {
return endpointList;
}
public static class MbusParams {
final Double maxConcurrentFactor;
final Double documentExpansionFactor;
final Integer containerCoreMemory;
public MbusParams(Double maxConcurrentFactor, Double documentExpansionFactor, Integer containerCoreMemory) {
this.maxConcurrentFactor = maxConcurrentFactor;
this.documentExpansionFactor = documentExpansionFactor;
this.containerCoreMemory = containerCoreMemory;
}
}
} | class ApplicationContainerCluster extends ContainerCluster<ApplicationContainer> implements
ApplicationBundlesConfig.Producer,
QrStartConfig.Producer,
RankProfilesConfig.Producer,
RankingConstantsConfig.Producer,
OnnxModelsConfig.Producer,
RankingExpressionsConfig.Producer,
ServletPathsConfig.Producer,
ContainerMbusConfig.Producer,
MetricsProxyApiConfig.Producer,
ZookeeperServerConfig.Producer,
ApplicationClusterInfo {
public static final String METRICS_V2_HANDLER_CLASS = MetricsV2Handler.class.getName();
public static final BindingPattern METRICS_V2_HANDLER_BINDING_1 = SystemBindingPattern.fromHttpPath(MetricsV2Handler.V2_PATH);
public static final BindingPattern METRICS_V2_HANDLER_BINDING_2 = SystemBindingPattern.fromHttpPath(MetricsV2Handler.V2_PATH + "/*");
public static final String PROMETHEUS_V1_HANDLER_CLASS = PrometheusV1Handler.class.getName();
private static final BindingPattern PROMETHEUS_V1_HANDLER_BINDING_1 = SystemBindingPattern.fromHttpPath(PrometheusV1Handler.V1_PATH);
private static final BindingPattern PROMETHEUS_V1_HANDLER_BINDING_2 = SystemBindingPattern.fromHttpPath(PrometheusV1Handler.V1_PATH + "/*");
public static final int heapSizePercentageOfTotalNodeMemory = 70;
public static final int heapSizePercentageOfTotalNodeMemoryWhenCombinedCluster = 18;
private final Set<FileReference> applicationBundles = new LinkedHashSet<>();
private final ConfigProducerGroup<Servlet> servletGroup;
private final Set<String> previousHosts;
private ContainerModelEvaluation modelEvaluation;
private final Optional<String> tlsClientAuthority;
private MbusParams mbusParams;
private boolean messageBusEnabled = true;
private Integer memoryPercentage = null;
private List<ApplicationClusterEndpoint> endpointList = List.of();
public ApplicationContainerCluster(AbstractConfigProducer<?> parent, String configSubId, String clusterId, DeployState deployState) {
super(parent, configSubId, clusterId, deployState, true);
this.tlsClientAuthority = deployState.tlsClientAuthority();
servletGroup = new ConfigProducerGroup<>(this, "servlet");
previousHosts = deployState.getPreviousModel().stream()
.map(Model::allocatedHosts)
.map(AllocatedHosts::getHosts)
.flatMap(Collection::stream)
.map(HostSpec::hostname)
.collect(Collectors.toUnmodifiableSet());
addSimpleComponent("com.yahoo.language.provider.DefaultLinguisticsProvider");
addSimpleComponent("com.yahoo.language.provider.DefaultEmbedderProvider");
addSimpleComponent("com.yahoo.container.jdisc.SecretStoreProvider");
addSimpleComponent("com.yahoo.container.jdisc.DeprecatedSecretStoreProvider");
addSimpleComponent("com.yahoo.container.jdisc.CertificateStoreProvider");
addSimpleComponent("com.yahoo.container.jdisc.AthenzIdentityProviderProvider");
addSimpleComponent(com.yahoo.container.core.documentapi.DocumentAccessProvider.class.getName());
addMetricsHandlers();
addTestrunnerComponentsIfTester(deployState);
}
@Override
protected void doPrepare(DeployState deployState) {
addAndSendApplicationBundles(deployState);
sendUserConfiguredFiles(deployState);
createEndpointList(deployState);
}
private void addAndSendApplicationBundles(DeployState deployState) {
for (ComponentInfo component : deployState.getApplicationPackage().getComponentsInfo(deployState.getVespaVersion())) {
FileReference reference = deployState.getFileRegistry().addFile(component.getPathRelativeToAppDir());
applicationBundles.add(reference);
}
}
private void sendUserConfiguredFiles(DeployState deployState) {
FileSender fileSender = new FileSender(containers, deployState.getFileRegistry(), deployState.getDeployLogger());
for (Component<?, ?> component : getAllComponents()) {
fileSender.sendUserConfiguredFiles(component);
}
}
private void addMetricsHandlers() {
addMetricsHandler(METRICS_V2_HANDLER_CLASS, METRICS_V2_HANDLER_BINDING_1, METRICS_V2_HANDLER_BINDING_2);
addMetricsHandler(PROMETHEUS_V1_HANDLER_CLASS, PROMETHEUS_V1_HANDLER_BINDING_1, PROMETHEUS_V1_HANDLER_BINDING_2);
}
private void addMetricsHandler(String handlerClass, BindingPattern rootBinding, BindingPattern innerBinding) {
Handler<AbstractConfigProducer<?>> handler = new Handler<>(
new ComponentModel(handlerClass, null, null, null));
handler.addServerBindings(rootBinding, innerBinding);
addComponent(handler);
}
private void addTestrunnerComponentsIfTester(DeployState deployState) {
if (deployState.isHosted() && deployState.getProperties().applicationId().instance().isTester()) {
addPlatformBundle(PlatformBundles.absoluteBundlePath("vespa-testrunner-components"));
addPlatformBundle(PlatformBundles.absoluteBundlePath("vespa-osgi-testrunner"));
addPlatformBundle(PlatformBundles.absoluteBundlePath("tenant-cd-api"));
if(deployState.zone().system().isPublic()) {
addPlatformBundle(PlatformBundles.absoluteBundlePath("cloud-tenant-cd"));
}
}
}
public void setModelEvaluation(ContainerModelEvaluation modelEvaluation) {
this.modelEvaluation = modelEvaluation;
}
public Map<ComponentId, Servlet> getServletMap() {
return servletGroup.getComponentMap();
}
public void addServlet(Servlet servlet) {
servletGroup.addComponent(servlet.getGlobalComponentId(), servlet);
}
public Collection<Servlet> getAllServlets() {
return allServlets().collect(Collectors.toCollection(ArrayList::new));
}
private Stream<Servlet> allServlets() {
return servletGroup.getComponents().stream();
}
public void setMemoryPercentage(Integer memoryPercentage) { this.memoryPercentage = memoryPercentage;
}
/**
* Returns the percentage of host physical memory this application has specified for nodes in this cluster,
* or empty if this is not specified by the application.
*/
public Optional<Integer> getMemoryPercentage() { return Optional.ofNullable(memoryPercentage); }
/*
      Create the list of endpoints; these will be consumed later by the LBservicesProducer
*/
@Override
public void getConfig(ApplicationBundlesConfig.Builder builder) {
applicationBundles.stream().map(FileReference::value)
.forEach(builder::bundles);
}
@Override
public void getConfig(ServletPathsConfig.Builder builder) {
allServlets().forEach(servlet ->
builder.servlets(servlet.getComponentId().stringValue(),
servlet.toConfigBuilder())
);
}
@Override
public void getConfig(RankProfilesConfig.Builder builder) {
if (modelEvaluation != null) modelEvaluation.getConfig(builder);
}
@Override
public void getConfig(RankingConstantsConfig.Builder builder) {
if (modelEvaluation != null) modelEvaluation.getConfig(builder);
}
@Override
public void getConfig(OnnxModelsConfig.Builder builder) {
if (modelEvaluation != null) modelEvaluation.getConfig(builder);
}
public void getConfig(RankingExpressionsConfig.Builder builder) {
if (modelEvaluation != null) modelEvaluation.getConfig(builder);
}
@Override
public void getConfig(ContainerMbusConfig.Builder builder) {
if (mbusParams != null) {
if (mbusParams.maxConcurrentFactor != null)
builder.maxConcurrentFactor(mbusParams.maxConcurrentFactor);
if (mbusParams.documentExpansionFactor != null)
builder.documentExpansionFactor(mbusParams.documentExpansionFactor);
if (mbusParams.containerCoreMemory != null)
builder.containerCoreMemory(mbusParams.containerCoreMemory);
}
if (getDocproc() != null)
getDocproc().getConfig(builder);
}
@Override
public void getConfig(MetricsProxyApiConfig.Builder builder) {
builder.metricsPort(MetricsProxyContainer.BASEPORT)
.metricsApiPath(ApplicationMetricsHandler.METRICS_VALUES_PATH)
.prometheusApiPath(ApplicationMetricsHandler.PROMETHEUS_VALUES_PATH);
}
@Override
public void getConfig(QrStartConfig.Builder builder) {
super.getConfig(builder);
builder.jvm.verbosegc(true)
.availableProcessors(0)
.compressedClassSpaceSize(0)
.minHeapsize(1536)
.heapsize(1536);
if (getMemoryPercentage().isPresent()) {
builder.jvm.heapSizeAsPercentageOfPhysicalMemory(getMemoryPercentage().get());
} else if (isHostedVespa()) {
builder.jvm.heapSizeAsPercentageOfPhysicalMemory(getHostClusterId().isPresent() ?
heapSizePercentageOfTotalNodeMemoryWhenCombinedCluster :
heapSizePercentageOfTotalNodeMemory);
}
}
@Override
public void getConfig(ZookeeperServerConfig.Builder builder) {
if (getParent() instanceof ConfigserverCluster) return;
for (Container container : getContainers()) {
ZookeeperServerConfig.Server.Builder serverBuilder = new ZookeeperServerConfig.Server.Builder();
serverBuilder.hostname(container.getHostName())
.id(container.index())
.joining(!previousHosts.isEmpty() &&
!previousHosts.contains(container.getHostName()));
builder.server(serverBuilder);
builder.dynamicReconfiguration(true);
}
}
public Optional<String> getTlsClientAuthority() {
return tlsClientAuthority;
}
public void setMbusParams(MbusParams mbusParams) {
this.mbusParams = mbusParams;
}
public void setMessageBusEnabled(boolean messageBusEnabled) { this.messageBusEnabled = messageBusEnabled; }
protected boolean messageBusEnabled() { return messageBusEnabled; }
public void addMbusServer(ComponentId chainId) {
ComponentId serviceId = chainId.nestInNamespace(ComponentId.fromString("MbusServer"));
addComponent(
new Component<>(new ComponentModel(new BundleInstantiationSpecification(
serviceId,
ComponentSpecification.fromString(MbusServerProvider.class.getName()),
null))));
}
@Override
public List<ApplicationClusterEndpoint> endpoints() {
return endpointList;
}
public static class MbusParams {
final Double maxConcurrentFactor;
final Double documentExpansionFactor;
final Integer containerCoreMemory;
public MbusParams(Double maxConcurrentFactor, Double documentExpansionFactor, Integer containerCoreMemory) {
this.maxConcurrentFactor = maxConcurrentFactor;
this.documentExpansionFactor = documentExpansionFactor;
this.containerCoreMemory = containerCoreMemory;
}
}
} |
```suggestion
    // Java uses BigDecimal so 0.2 * 170 = 63.9...
    // BigDecimal.longValue() will round down to 63 instead of the expected 64
``` | public void testTrySplitForProcessSplitOnMiddleWindow() throws Exception {
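    // Note on the expected offsets below (an interpretation of the review comment, not something the
    // original test states): the 0.2 fraction is applied to roughly 170 units of remaining work (the
    // rest of window2 plus all of window3), so the cut would land near offset 30 + 0.2 * 170 = 64;
    // the intermediate BigDecimal/double value comes out just under 64 (63.9...) and truncating it to
    // a long rounds down, which is why the primary range ends at 63 rather than 64.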
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window2,
windows,
currentWatermarkEstimatorState,
0.2,
tracker,
watermarkAndState,
1,
3);
assertEquals(2, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedElementSplit =
createSplitInWindow(new OffsetRange(0, 63), new OffsetRange(63, 100), window2);
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window3));
assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot());
assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
} | createSplitInWindow(new OffsetRange(0, 63), new OffsetRange(63, 100), window2); | public void testTrySplitForProcessSplitOnMiddleWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window2,
windows,
currentWatermarkEstimatorState,
0.2,
tracker,
watermarkAndState,
1,
3);
assertEquals(2, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedElementSplit =
createSplitInWindow(new OffsetRange(0, 63), new OffsetRange(63, 100), window2);
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window3));
assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot());
assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
} | class SplitTest {
private IntervalWindow window1;
private IntervalWindow window2;
private IntervalWindow window3;
private WindowedValue<String> currentElement;
private OffsetRange currentRestriction;
private Instant currentWatermarkEstimatorState;
KV<Instant, Instant> watermarkAndState;
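  // Test helper: builds the expected (primary, residual) pair for an element-level split within a
  // single window. The primary keeps the claimed part of the restriction together with the current
  // watermark estimator state; the residual carries the remaining range plus the resume watermark
  // taken from watermarkAndState.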
private KV<WindowedValue, WindowedValue> createSplitInWindow(
OffsetRange primaryRestriction, OffsetRange residualRestriction, BoundedWindow window) {
return KV.of(
WindowedValue.of(
KV.of(
currentElement.getValue(),
KV.of(primaryRestriction, currentWatermarkEstimatorState)),
currentElement.getTimestamp(),
window,
currentElement.getPane()),
WindowedValue.of(
KV.of(
currentElement.getValue(),
KV.of(residualRestriction, watermarkAndState.getValue())),
currentElement.getTimestamp(),
window,
currentElement.getPane()));
}
private KV<WindowedValue, WindowedValue> createSplitAcrossWindows(
List<BoundedWindow> primaryWindows, List<BoundedWindow> residualWindows) {
return KV.of(
primaryWindows.isEmpty()
? null
: WindowedValue.of(
KV.of(
currentElement.getValue(),
KV.of(currentRestriction, currentWatermarkEstimatorState)),
currentElement.getTimestamp(),
primaryWindows,
currentElement.getPane()),
residualWindows.isEmpty()
? null
: WindowedValue.of(
KV.of(
currentElement.getValue(),
KV.of(currentRestriction, currentWatermarkEstimatorState)),
currentElement.getTimestamp(),
residualWindows,
currentElement.getPane()));
}
@Before
public void setUp() {
window1 = new IntervalWindow(Instant.ofEpochMilli(0), Instant.ofEpochMilli(10));
window2 = new IntervalWindow(Instant.ofEpochMilli(10), Instant.ofEpochMilli(20));
window3 = new IntervalWindow(Instant.ofEpochMilli(20), Instant.ofEpochMilli(30));
currentElement =
WindowedValue.of(
"a",
Instant.ofEpochMilli(57),
ImmutableList.of(window1, window2, window3),
PaneInfo.NO_FIRING);
currentRestriction = new OffsetRange(0L, 100L);
currentWatermarkEstimatorState = Instant.ofEpochMilli(21);
watermarkAndState = KV.of(Instant.ofEpochMilli(42), Instant.ofEpochMilli(42));
}
@Test
public void testScaleProgress() throws Exception {
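    // The expected values below treat each window as one full unit of the element's work
    // (2 completed + 8 remaining = 10): for window index i out of n windows,
    // completed = 2 + i * 10 and remaining = 8 + (n - i - 1) * 10.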
Progress elementProgress = Progress.from(2, 8);
Progress scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 0, 1);
assertEquals(2, scaledResult.getWorkCompleted(), 0.0);
assertEquals(8, scaledResult.getWorkRemaining(), 0.0);
scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 0, 3);
assertEquals(2, scaledResult.getWorkCompleted(), 0.0);
assertEquals(28, scaledResult.getWorkRemaining(), 0.0);
scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 1, 3);
assertEquals(12, scaledResult.getWorkCompleted(), 0.0);
assertEquals(18, scaledResult.getWorkRemaining(), 0.0);
scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 2, 3);
assertEquals(22, scaledResult.getWorkCompleted(), 0.0);
assertEquals(8, scaledResult.getWorkRemaining(), 0.0);
}
@Test
public void testTrySplitForProcessCheckpointOnFirstWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.0,
tracker,
watermarkAndState,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedElementSplit =
createSplitInWindow(new OffsetRange(0, 31), new OffsetRange(31, 100), window1);
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3));
assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot());
assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForProcessCheckpointOnFirstWindowAfterOneSplit() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.0,
tracker,
watermarkAndState,
0,
2);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedElementSplit =
createSplitInWindow(new OffsetRange(0, 31), new OffsetRange(31, 100), window1);
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2));
assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot());
assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForProcessSplitOnFirstWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.2,
tracker,
watermarkAndState,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedElementSplit =
createSplitInWindow(new OffsetRange(0, 84), new OffsetRange(84, 100), window1);
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3));
assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot());
assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
  @Test
public void testTrySplitForProcessSplitOnLastWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window3,
windows,
currentWatermarkEstimatorState,
0.2,
tracker,
watermarkAndState,
2,
3);
assertEquals(3, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedElementSplit =
createSplitInWindow(new OffsetRange(0, 44), new OffsetRange(44, 100), window3);
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of());
assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot());
assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForProcessSplitOnFirstWindowFallback() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(100L);
assertNull(tracker.trySplit(0.0));
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window3,
windows,
currentWatermarkEstimatorState,
0,
tracker,
watermarkAndState,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3));
assertNull(result.getKey().getPrimarySplitRoot());
assertNull(result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForProcessSplitOnLastWindowWhenNoElementSplit() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(100L);
assertNull(tracker.trySplit(0.0));
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window3,
windows,
currentWatermarkEstimatorState,
0,
tracker,
watermarkAndState,
2,
3);
assertNull(result);
}
@Test
public void testTrySplitForProcessOnWindowBoundaryRoundUp() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window2,
windows,
currentWatermarkEstimatorState,
0.6,
tracker,
watermarkAndState,
0,
3);
assertEquals(2, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3));
assertNull(result.getKey().getPrimarySplitRoot());
assertNull(result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForProcessOnWindowBoundaryRoundDown() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window2,
windows,
currentWatermarkEstimatorState,
0.3,
tracker,
watermarkAndState,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3));
assertNull(result.getKey().getPrimarySplitRoot());
assertNull(result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForProcessOnWindowBoundaryRoundDownOnLastWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window2,
windows,
currentWatermarkEstimatorState,
0.9,
tracker,
watermarkAndState,
0,
3);
assertEquals(2, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3));
assertNull(result.getKey().getPrimarySplitRoot());
assertNull(result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
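  // Test helper: a HandlesSplits stub that reports a fixed progress value and, when asked to split,
  // first checks that the requested fractionOfRemainder matches the expected fraction and then
  // returns the canned SplitResult.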
private HandlesSplits createSplitDelegate(
double progress, double expectedFraction, HandlesSplits.SplitResult result) {
return new HandlesSplits() {
@Override
public SplitResult trySplit(double fractionOfRemainder) {
checkArgument(fractionOfRemainder == expectedFraction);
return result;
}
@Override
public double getProgress() {
return progress;
}
@Override
public String getPtranformId() {
return "transfrom_id";
}
@Override
public String getMainInputId() {
return "input_id";
}
@Override
public Collection<String> getOutputIds() {
return ImmutableSet.of("output");
}
};
}
@Test
public void testTrySplitForTruncateCheckpointOnFirstWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult splitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, splitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.0,
splitDelegate,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3));
assertEquals(splitResult, result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateCheckpointOnFirstWindowAfterOneSplit() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult splitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, splitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.0,
splitDelegate,
0,
2);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2));
assertEquals(splitResult, result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateSplitOnFirstWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult splitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.54, splitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.2,
splitDelegate,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3));
assertEquals(splitResult, result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateSplitOnMiddleWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult splitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.34, splitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.2,
splitDelegate,
1,
3);
assertEquals(2, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window3));
assertEquals(splitResult, result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateSplitOnLastWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult splitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.2, splitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.2,
splitDelegate,
2,
3);
assertEquals(3, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of());
assertEquals(splitResult, result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateSplitOnFirstWindowFallback() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult unusedSplitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(1.0, 0.0, unusedSplitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.0,
splitDelegate,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3));
assertNull(result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateSplitOnLastWindowWhenNoElementSplit() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
HandlesSplits splitDelegate = createSplitDelegate(1.0, 0.0, null);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.0,
splitDelegate,
2,
3);
assertNull(result);
}
@Test
public void testTrySplitForTruncateOnWindowBoundaryRoundUp() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult unusedSplitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, unusedSplitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.6,
splitDelegate,
0,
3);
assertEquals(2, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3));
assertNull(result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateOnWindowBoundaryRoundDown() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult unusedSplitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, unusedSplitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.3,
splitDelegate,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3));
assertNull(result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateOnWindowBoundaryRoundDownOnLastWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult unusedSplitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, unusedSplitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.6,
splitDelegate,
0,
3);
assertEquals(2, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3));
assertNull(result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
} | class SplitTest {
private IntervalWindow window1;
private IntervalWindow window2;
private IntervalWindow window3;
private WindowedValue<String> currentElement;
private OffsetRange currentRestriction;
private Instant currentWatermarkEstimatorState;
KV<Instant, Instant> watermarkAndState;
private KV<WindowedValue, WindowedValue> createSplitInWindow(
OffsetRange primaryRestriction, OffsetRange residualRestriction, BoundedWindow window) {
return KV.of(
WindowedValue.of(
KV.of(
currentElement.getValue(),
KV.of(primaryRestriction, currentWatermarkEstimatorState)),
currentElement.getTimestamp(),
window,
currentElement.getPane()),
WindowedValue.of(
KV.of(
currentElement.getValue(),
KV.of(residualRestriction, watermarkAndState.getValue())),
currentElement.getTimestamp(),
window,
currentElement.getPane()));
}
private KV<WindowedValue, WindowedValue> createSplitAcrossWindows(
List<BoundedWindow> primaryWindows, List<BoundedWindow> residualWindows) {
return KV.of(
primaryWindows.isEmpty()
? null
: WindowedValue.of(
KV.of(
currentElement.getValue(),
KV.of(currentRestriction, currentWatermarkEstimatorState)),
currentElement.getTimestamp(),
primaryWindows,
currentElement.getPane()),
residualWindows.isEmpty()
? null
: WindowedValue.of(
KV.of(
currentElement.getValue(),
KV.of(currentRestriction, currentWatermarkEstimatorState)),
currentElement.getTimestamp(),
residualWindows,
currentElement.getPane()));
}
@Before
public void setUp() {
window1 = new IntervalWindow(Instant.ofEpochMilli(0), Instant.ofEpochMilli(10));
window2 = new IntervalWindow(Instant.ofEpochMilli(10), Instant.ofEpochMilli(20));
window3 = new IntervalWindow(Instant.ofEpochMilli(20), Instant.ofEpochMilli(30));
currentElement =
WindowedValue.of(
"a",
Instant.ofEpochMilli(57),
ImmutableList.of(window1, window2, window3),
PaneInfo.NO_FIRING);
currentRestriction = new OffsetRange(0L, 100L);
currentWatermarkEstimatorState = Instant.ofEpochMilli(21);
watermarkAndState = KV.of(Instant.ofEpochMilli(42), Instant.ofEpochMilli(42));
}
@Test
public void testScaleProgress() throws Exception {
Progress elementProgress = Progress.from(2, 8);
Progress scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 0, 1);
assertEquals(2, scaledResult.getWorkCompleted(), 0.0);
assertEquals(8, scaledResult.getWorkRemaining(), 0.0);
scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 0, 3);
assertEquals(2, scaledResult.getWorkCompleted(), 0.0);
assertEquals(28, scaledResult.getWorkRemaining(), 0.0);
scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 1, 3);
assertEquals(12, scaledResult.getWorkCompleted(), 0.0);
assertEquals(18, scaledResult.getWorkRemaining(), 0.0);
scaledResult = FnApiDoFnRunner.scaleProgress(elementProgress, 2, 3);
assertEquals(22, scaledResult.getWorkCompleted(), 0.0);
assertEquals(8, scaledResult.getWorkRemaining(), 0.0);
}
@Test
public void testTrySplitForProcessCheckpointOnFirstWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.0,
tracker,
watermarkAndState,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedElementSplit =
createSplitInWindow(new OffsetRange(0, 31), new OffsetRange(31, 100), window1);
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3));
assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot());
assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForProcessCheckpointOnFirstWindowAfterOneSplit() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.0,
tracker,
watermarkAndState,
0,
2);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedElementSplit =
createSplitInWindow(new OffsetRange(0, 31), new OffsetRange(31, 100), window1);
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2));
assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot());
assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForProcessSplitOnFirstWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.2,
tracker,
watermarkAndState,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedElementSplit =
createSplitInWindow(new OffsetRange(0, 84), new OffsetRange(84, 100), window1);
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3));
assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot());
assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
  @Test
public void testTrySplitForProcessSplitOnLastWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window3,
windows,
currentWatermarkEstimatorState,
0.2,
tracker,
watermarkAndState,
2,
3);
assertEquals(3, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedElementSplit =
createSplitInWindow(new OffsetRange(0, 44), new OffsetRange(44, 100), window3);
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of());
assertEquals(expectedElementSplit.getKey(), result.getKey().getPrimarySplitRoot());
assertEquals(expectedElementSplit.getValue(), result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForProcessSplitOnFirstWindowFallback() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(100L);
assertNull(tracker.trySplit(0.0));
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window3,
windows,
currentWatermarkEstimatorState,
0,
tracker,
watermarkAndState,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3));
assertNull(result.getKey().getPrimarySplitRoot());
assertNull(result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForProcessSplitOnLastWindowWhenNoElementSplit() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(100L);
assertNull(tracker.trySplit(0.0));
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window3,
windows,
currentWatermarkEstimatorState,
0,
tracker,
watermarkAndState,
2,
3);
assertNull(result);
}
@Test
public void testTrySplitForProcessOnWindowBoundaryRoundUp() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window2,
windows,
currentWatermarkEstimatorState,
0.6,
tracker,
watermarkAndState,
0,
3);
assertEquals(2, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3));
assertNull(result.getKey().getPrimarySplitRoot());
assertNull(result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForProcessOnWindowBoundaryRoundDown() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window2,
windows,
currentWatermarkEstimatorState,
0.3,
tracker,
watermarkAndState,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3));
assertNull(result.getKey().getPrimarySplitRoot());
assertNull(result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForProcessOnWindowBoundaryRoundDownOnLastWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
OffsetRangeTracker tracker = new OffsetRangeTracker(currentRestriction);
tracker.tryClaim(30L);
KV<WindowedSplitResult, Integer> result =
FnApiDoFnRunner.trySplitForProcess(
currentElement,
currentRestriction,
window2,
windows,
currentWatermarkEstimatorState,
0.9,
tracker,
watermarkAndState,
0,
3);
assertEquals(2, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3));
assertNull(result.getKey().getPrimarySplitRoot());
assertNull(result.getKey().getResidualSplitRoot());
assertEquals(
expectedWindowSplit.getKey(), result.getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(), result.getKey().getResidualInUnprocessedWindowsRoot());
}
private HandlesSplits createSplitDelegate(
double progress, double expectedFraction, HandlesSplits.SplitResult result) {
return new HandlesSplits() {
@Override
public SplitResult trySplit(double fractionOfRemainder) {
checkArgument(fractionOfRemainder == expectedFraction);
return result;
}
@Override
public double getProgress() {
return progress;
}
};
}
@Test
public void testTrySplitForTruncateCheckpointOnFirstWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult splitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, splitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.0,
splitDelegate,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3));
assertEquals(splitResult, result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateCheckpointOnFirstWindowAfterOneSplit() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult splitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, splitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.0,
splitDelegate,
0,
2);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2));
assertEquals(splitResult, result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateSplitOnFirstWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult splitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.54, splitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.2,
splitDelegate,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(), ImmutableList.of(window2, window3));
assertEquals(splitResult, result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateSplitOnMiddleWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult splitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.34, splitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.2,
splitDelegate,
1,
3);
assertEquals(2, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window3));
assertEquals(splitResult, result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateSplitOnLastWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult splitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.2, splitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.2,
splitDelegate,
2,
3);
assertEquals(3, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of());
assertEquals(splitResult, result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateSplitOnFirstWindowFallback() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult unusedSplitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(1.0, 0.0, unusedSplitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.0,
splitDelegate,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3));
assertNull(result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateSplitOnLastWindowWhenNoElementSplit() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
HandlesSplits splitDelegate = createSplitDelegate(1.0, 0.0, null);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.0,
splitDelegate,
2,
3);
assertNull(result);
}
@Test
public void testTrySplitForTruncateOnWindowBoundaryRoundUp() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult unusedSplitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, unusedSplitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.6,
splitDelegate,
0,
3);
assertEquals(2, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3));
assertNull(result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateOnWindowBoundaryRoundDown() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult unusedSplitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, unusedSplitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.3,
splitDelegate,
0,
3);
assertEquals(1, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1), ImmutableList.of(window2, window3));
assertNull(result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
@Test
public void testTrySplitForTruncateOnWindowBoundaryRoundDownOnLastWindow() throws Exception {
List<BoundedWindow> windows = ImmutableList.copyOf(currentElement.getWindows());
SplitResult unusedSplitResult =
SplitResult.of(
ImmutableList.of(BundleApplication.getDefaultInstance()),
ImmutableList.of(DelayedBundleApplication.getDefaultInstance()));
HandlesSplits splitDelegate = createSplitDelegate(0.3, 0.0, unusedSplitResult);
KV<KV<WindowedSplitResult, SplitResult>, Integer> result =
FnApiDoFnRunner.trySplitForTruncate(
currentElement,
currentRestriction,
window1,
windows,
currentWatermarkEstimatorState,
0.6,
splitDelegate,
0,
3);
assertEquals(2, (int) result.getValue());
KV<WindowedValue, WindowedValue> expectedWindowSplit =
createSplitAcrossWindows(ImmutableList.of(window1, window2), ImmutableList.of(window3));
assertNull(result.getKey().getValue());
assertEquals(
expectedWindowSplit.getKey(),
result.getKey().getKey().getPrimaryInFullyProcessedWindowsRoot());
assertEquals(
expectedWindowSplit.getValue(),
result.getKey().getKey().getResidualInUnprocessedWindowsRoot());
}
} |
Could you add some details in this pr to explain the difference between this and the original one. | private void analyzeSetExprs(Analyzer analyzer) throws AnalysisException {
Set<String> columnMappingNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
for (Expr setExpr : setExprs) {
if (!(setExpr instanceof BinaryPredicate)) {
throw new AnalysisException("Set function expr only support eq binary predicate. "
+ "Expr: " + setExpr.toSql());
}
BinaryPredicate predicate = (BinaryPredicate) setExpr;
if (predicate.getOp() != BinaryPredicate.Operator.EQ) {
throw new AnalysisException("Set function expr only support eq binary predicate. "
+ "The predicate operator error, op: " + predicate.getOp());
}
Expr lhs = predicate.getChild(0);
if (!(lhs instanceof SlotRef)) {
throw new AnalysisException("Set function expr only support eq binary predicate "
+ "which's child(0) must be a column name. "
+ "The child(0) expr error. expr: " + lhs.toSql());
}
String column = ((SlotRef) lhs).getColumnName();
if (!columnMappingNames.add(column)) {
throw new AnalysisException("Duplicate column setting: " + column);
}
}
for (Expr setExpr : setExprs) {
Preconditions.checkState(setExpr instanceof BinaryPredicate);
Expr lhs = setExpr.getChild(0);
if (!(lhs instanceof SlotRef)) {
throw new AnalysisException("The left side of the set expr must be the column name");
}
lhs.analyze(analyzer);
if (((SlotRef) lhs).getColumn().isKey()) {
throw new AnalysisException("Only value columns of unique table could be updated.");
}
Expr rhs = setExpr.getChild(1);
checkLargeIntOverflow(rhs);
rhs.analyze(analyzer);
if (lhs.getType() != rhs.getType()) {
setExpr.setChild(1, rhs.checkTypeCompatibility(lhs.getType()));
}
}
} | if (((SlotRef) lhs).getColumn().isKey()) { | private void analyzeSetExprs(Analyzer analyzer) throws AnalysisException {
Set<String> columnMappingNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
for (Expr setExpr : setExprs) {
if (!(setExpr instanceof BinaryPredicate)) {
throw new AnalysisException("Set function expr only support eq binary predicate. "
+ "Expr: " + setExpr.toSql());
}
BinaryPredicate predicate = (BinaryPredicate) setExpr;
if (predicate.getOp() != BinaryPredicate.Operator.EQ) {
throw new AnalysisException("Set function expr only support eq binary predicate. "
+ "The predicate operator error, op: " + predicate.getOp());
}
Expr lhs = predicate.getChild(0);
if (!(lhs instanceof SlotRef)) {
throw new AnalysisException("Set function expr only support eq binary predicate "
+ "which's child(0) must be a column name. "
+ "The child(0) expr error. expr: " + lhs.toSql());
}
String column = ((SlotRef) lhs).getColumnName();
if (!columnMappingNames.add(column)) {
throw new AnalysisException("Duplicate column setting: " + column);
}
}
for (Expr setExpr : setExprs) {
Preconditions.checkState(setExpr instanceof BinaryPredicate);
Expr lhs = setExpr.getChild(0);
if (!(lhs instanceof SlotRef)) {
throw new AnalysisException("The left side of the set expr must be the column name");
}
lhs.analyze(analyzer);
if (((SlotRef) lhs).getColumn().isKey()) {
throw new AnalysisException("Only value columns of unique table could be updated.");
}
Expr rhs = setExpr.getChild(1);
checkLargeIntOverflow(rhs);
rhs.analyze(analyzer);
if (lhs.getType() != rhs.getType()) {
setExpr.setChild(1, rhs.checkTypeCompatibility(lhs.getType()));
}
}
} | class UpdateStmt extends DdlStmt {
private TableName tableName;
private List<Expr> setExprs;
private Expr whereExpr;
private Table targetTable;
private TupleDescriptor srcTupleDesc;
public UpdateStmt(TableName tableName, List<Expr> setExprs, Expr whereExpr) {
this.tableName = tableName;
this.setExprs = setExprs;
this.whereExpr = whereExpr;
}
public TableName getTableName() {
return tableName;
}
public List<Expr> getSetExprs() {
return setExprs;
}
public Expr getWhereExpr() {
return whereExpr;
}
public Table getTargetTable() {
return targetTable;
}
public TupleDescriptor getSrcTupleDesc() {
return srcTupleDesc;
}
@Override
public void analyze(Analyzer analyzer) throws UserException {
super.analyze(analyzer);
analyzeTargetTable(analyzer);
analyzeSetExprs(analyzer);
analyzeWhereExpr(analyzer);
}
private void analyzeTargetTable(Analyzer analyzer) throws AnalysisException {
tableName.analyze(analyzer);
Util.prohibitExternalCatalog(tableName.getCtl(), this.getClass().getSimpleName());
if (!Env.getCurrentEnv().getAuth()
.checkTblPriv(ConnectContext.get(), tableName.getDb(), tableName.getTbl(), PrivPredicate.LOAD)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "LOAD");
}
String dbName = tableName.getDb();
String targetTableName = tableName.getTbl();
Preconditions.checkNotNull(dbName);
Preconditions.checkNotNull(targetTableName);
Database database = Env.getCurrentInternalCatalog().getDbOrAnalysisException(dbName);
targetTable = database.getTableOrAnalysisException(tableName.getTbl());
if (targetTable.getType() != Table.TableType.OLAP
|| ((OlapTable) targetTable).getKeysType() != KeysType.UNIQUE_KEYS) {
throw new AnalysisException("Only unique olap table could be updated.");
}
targetTable.readLock();
try {
srcTupleDesc = analyzer.registerOlapTable(targetTable, tableName, null);
} finally {
targetTable.readUnlock();
}
}
/*
The overflow detection of LargeInt needs to be verified again here.
The reason is: the first overflow detection (in the constructor) cannot filter 2^127.
Therefore, a second verification is required here.
*/
private void checkLargeIntOverflow(Expr expr) throws AnalysisException {
if (expr instanceof LargeIntLiteral) {
expr.analyzeImpl(analyzer);
}
}
private void analyzeWhereExpr(Analyzer analyzer) throws AnalysisException {
if (whereExpr == null) {
throw new AnalysisException("Where clause is required");
}
whereExpr.analyze(analyzer);
whereExpr = analyzer.getExprRewriter().rewrite(whereExpr, analyzer, ExprRewriter.ClauseType.WHERE_CLAUSE);
whereExpr.reset();
whereExpr.analyze(analyzer);
if (!whereExpr.getType().equals(Type.BOOLEAN)) {
throw new AnalysisException("Where clause is not a valid statement return bool");
}
analyzer.registerConjunct(whereExpr, srcTupleDesc.getId());
}
@Override
public String toSql() {
StringBuilder sb = new StringBuilder("UPDATE ");
sb.append(tableName.toSql()).append("\n");
sb.append(" ").append("SET ");
for (Expr setExpr : setExprs) {
sb.append(setExpr.toSql()).append(", ");
}
sb.append("\n");
if (whereExpr != null) {
sb.append(" ").append("WHERE ").append(whereExpr.toSql());
}
return sb.toString();
}
} | class UpdateStmt extends DdlStmt {
private TableName tableName;
private List<Expr> setExprs;
private Expr whereExpr;
private Table targetTable;
private TupleDescriptor srcTupleDesc;
public UpdateStmt(TableName tableName, List<Expr> setExprs, Expr whereExpr) {
this.tableName = tableName;
this.setExprs = setExprs;
this.whereExpr = whereExpr;
}
public TableName getTableName() {
return tableName;
}
public List<Expr> getSetExprs() {
return setExprs;
}
public Expr getWhereExpr() {
return whereExpr;
}
public Table getTargetTable() {
return targetTable;
}
public TupleDescriptor getSrcTupleDesc() {
return srcTupleDesc;
}
@Override
public void analyze(Analyzer analyzer) throws UserException {
super.analyze(analyzer);
analyzeTargetTable(analyzer);
analyzeSetExprs(analyzer);
analyzeWhereExpr(analyzer);
}
private void analyzeTargetTable(Analyzer analyzer) throws AnalysisException {
tableName.analyze(analyzer);
Util.prohibitExternalCatalog(tableName.getCtl(), this.getClass().getSimpleName());
if (!Env.getCurrentEnv().getAuth()
.checkTblPriv(ConnectContext.get(), tableName.getDb(), tableName.getTbl(), PrivPredicate.LOAD)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "LOAD");
}
String dbName = tableName.getDb();
String targetTableName = tableName.getTbl();
Preconditions.checkNotNull(dbName);
Preconditions.checkNotNull(targetTableName);
Database database = Env.getCurrentInternalCatalog().getDbOrAnalysisException(dbName);
targetTable = database.getTableOrAnalysisException(tableName.getTbl());
if (targetTable.getType() != Table.TableType.OLAP
|| ((OlapTable) targetTable).getKeysType() != KeysType.UNIQUE_KEYS) {
throw new AnalysisException("Only unique olap table could be updated.");
}
targetTable.readLock();
try {
srcTupleDesc = analyzer.registerOlapTable(targetTable, tableName, null);
} finally {
targetTable.readUnlock();
}
}
/*
The overflow detection of LargeInt needs to be verified again here.
The reason is: the first overflow detection (in the constructor) cannot filter 2^127.
Therefore, a second verification is required here.
*/
private void checkLargeIntOverflow(Expr expr) throws AnalysisException {
if (expr instanceof LargeIntLiteral) {
expr.analyzeImpl(analyzer);
}
}
private void analyzeWhereExpr(Analyzer analyzer) throws AnalysisException {
if (whereExpr == null) {
throw new AnalysisException("Where clause is required");
}
whereExpr.analyze(analyzer);
whereExpr = analyzer.getExprRewriter().rewrite(whereExpr, analyzer, ExprRewriter.ClauseType.WHERE_CLAUSE);
whereExpr.reset();
whereExpr.analyze(analyzer);
if (!whereExpr.getType().equals(Type.BOOLEAN)) {
throw new AnalysisException("Where clause is not a valid statement return bool");
}
analyzer.registerConjunct(whereExpr, srcTupleDesc.getId());
}
@Override
public String toSql() {
StringBuilder sb = new StringBuilder("UPDATE ");
sb.append(tableName.toSql()).append("\n");
sb.append(" ").append("SET ");
for (Expr setExpr : setExprs) {
sb.append(setExpr.toSql()).append(", ");
}
sb.append("\n");
if (whereExpr != null) {
sb.append(" ").append("WHERE ").append(whereExpr.toSql());
}
return sb.toString();
}
} |
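The duplicate-column guard in analyzeSetExprs above hinges on a TreeSet built with String.CASE_INSENSITIVE_ORDER, whose add() returns false when a name already present in any letter case is inserted again. A minimal, self-contained sketch of that idiom (the class name and sample column names below are hypothetical, not part of the Doris sources):

```java
import java.util.Set;
import java.util.TreeSet;

public class CaseInsensitiveDuplicateCheck {
    public static void main(String[] args) {
        // Same construction as in analyzeSetExprs: comparator makes lookups case-insensitive.
        Set<String> columnMappingNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        String[] setColumns = {"v1", "V1", "v2"};
        for (String column : setColumns) {
            if (!columnMappingNames.add(column)) {
                // Reached for "V1": the comparator treats it as equal to the earlier "v1".
                System.out.println("Duplicate column setting: " + column);
            }
        }
    }
}
```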
Can't this be simplified to: ``` return billablePlans().stream() .flatMap(p -> billing.tenantsWithPlan(tenants, p.id()).stream()) .toList(); ``` | private List<TenantName> billableTenants(List<TenantName> tenants) {
return billablePlans().stream()
.collect(Collectors.toMap(
p -> p,
p -> billing.tenantsWithPlan(tenants, p.id())))
.values()
.stream()
.flatMap(Collection::stream)
.toList();
} | .toList(); | private List<TenantName> billableTenants(List<TenantName> tenants) {
return billablePlans().stream()
.flatMap(p -> billing.tenantsWithPlan(tenants, p.id()).stream())
.toList();
} | class BillingReportMaintainer extends ControllerMaintainer {
private final BillingReporter reporter;
private final BillingController billing;
private final PlanRegistry plans;
public BillingReportMaintainer(Controller controller, Duration interval) {
super(controller, interval, null, Set.of(SystemName.PublicCd));
this.reporter = controller.serviceRegistry().billingReporter();
this.billing = controller.serviceRegistry().billingController();
this.plans = controller.serviceRegistry().planRegistry();
}
@Override
protected double maintain() {
reporter.maintainResources();
maintainTenants();
return 0.0;
}
private void maintainTenants() {
var tenants = cloudTenants();
var tenantNames = List.copyOf(tenants.keySet());
var billableTenants = billableTenants(tenantNames);
billableTenants.forEach(tenant -> {
controller().tenants().lockIfPresent(tenant, LockedTenant.Cloud.class, locked -> {
var ref = reporter.maintainTenant(locked.get());
if (locked.get().billingReference().isEmpty() || ! locked.get().billingReference().get().equals(ref)) {
controller().tenants().store(locked.with(ref));
}
});
});
}
private Map<TenantName, CloudTenant> cloudTenants() {
return controller().tenants().asList()
.stream()
.filter(CloudTenant.class::isInstance)
.map(CloudTenant.class::cast)
.collect(Collectors.toMap(
Tenant::name,
Function.identity()));
}
private List<Plan> billablePlans() {
return plans.all().stream()
.filter(Plan::isBilled)
.toList();
}
} | class BillingReportMaintainer extends ControllerMaintainer {
private final BillingReporter reporter;
private final BillingController billing;
private final PlanRegistry plans;
public BillingReportMaintainer(Controller controller, Duration interval) {
super(controller, interval, null, Set.of(SystemName.PublicCd));
this.reporter = controller.serviceRegistry().billingReporter();
this.billing = controller.serviceRegistry().billingController();
this.plans = controller.serviceRegistry().planRegistry();
}
@Override
protected double maintain() {
maintainTenants();
return 0.0;
}
private void maintainTenants() {
var tenants = cloudTenants();
var tenantNames = List.copyOf(tenants.keySet());
var billableTenants = billableTenants(tenantNames);
billableTenants.forEach(tenant -> {
controller().tenants().lockIfPresent(tenant, LockedTenant.Cloud.class, locked -> {
var ref = reporter.maintainTenant(locked.get());
if (locked.get().billingReference().isEmpty() || ! locked.get().billingReference().get().equals(ref)) {
controller().tenants().store(locked.with(ref));
}
});
});
}
private Map<TenantName, CloudTenant> cloudTenants() {
return controller().tenants().asList()
.stream()
.filter(CloudTenant.class::isInstance)
.map(CloudTenant.class::cast)
.collect(Collectors.toMap(
Tenant::name,
Function.identity()));
}
private List<Plan> billablePlans() {
return plans.all().stream()
.filter(Plan::isBilled)
.toList();
}
} |
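The review comment above argues that collecting per-plan tenant lists into a map and then streaming its values adds nothing over flat-mapping each plan's tenants directly, which is what the updated billableTenants does. A minimal, self-contained sketch of that equivalence (the Plan record and tenant data are hypothetical stand-ins, not the actual controller types):

```java
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;

public class FlatMapSimplificationSketch {
    // Hypothetical stand-in for a billed plan and the tenants resolved for it.
    record Plan(String id, List<String> tenants) {}

    // Original shape: build a plan -> tenant-list map, then flatten the map values.
    static List<String> viaMapOfLists(List<Plan> plans) {
        return plans.stream()
                .collect(Collectors.toMap(Function.identity(), Plan::tenants))
                .values()
                .stream()
                .flatMap(List::stream)
                .toList();
    }

    // Suggested shape: flat-map each plan's tenants directly.
    static List<String> viaFlatMap(List<Plan> plans) {
        return plans.stream()
                .flatMap(plan -> plan.tenants().stream())
                .toList();
    }

    public static void main(String[] args) {
        List<Plan> plans = List.of(
                new Plan("trial", List.of("tenant-a")),
                new Plan("paid", List.of("tenant-b", "tenant-c")));
        // Same tenants either way; the flatMap form also keeps plan order, whereas
        // the intermediate HashMap gives no ordering guarantee for its values().
        System.out.println(viaMapOfLists(plans));
        System.out.println(viaFlatMap(plans));
    }
}
```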
Did as you suggested and can verify that the fix works. Removing the fix causes the test to fail. | public void testClearingEnvironmentCache() throws IOException {
Path projectBCachePath = testBuildDirectory.resolve("repo").resolve("cache").resolve("samjs").resolve(
"projectB");
FileUtils.deleteDirectory(projectBCachePath.toFile());
CompileResult depCompileResult = BCompileUtil.compileAndCacheBala("projects_for_resolution_tests/projectB1");
if (depCompileResult.getErrorCount() > 0) {
Assert.fail("Package B contains compilations error");
}
Path projectA = RESOURCE_DIRECTORY.resolve("projectA");
Project loadProjectA = TestUtils.loadBuildProject(projectA);
PackageCompilation compilation = loadProjectA.currentPackage().getCompilation();
DependencyGraph<ResolvedPackageDependency> dependencyGraph = compilation.getResolution().dependencyGraph();
for (ResolvedPackageDependency graphNode : dependencyGraph.getNodes()) {
PackageManifest manifest = graphNode.packageInstance().manifest();
if (manifest.name().value().equals("projectB")) {
Assert.assertEquals(manifest.version().toString(), "1.0.0");
}
}
String newMainProjectAContent = "";
Module defaultModuleProjectA = loadProjectA.currentPackage().getDefaultModule();
Optional<DocumentId> mainDocumentId =
defaultModuleProjectA.documentIds()
.stream()
.filter(documentId -> defaultModuleProjectA.document(documentId).name().equals("main.bal"))
.findFirst();
if (mainDocumentId.isEmpty()) {
Assert.fail("Failed to retrieve the document ID");
}
Document document = defaultModuleProjectA.document(mainDocumentId.get());
document.modify().withContent(newMainProjectAContent).apply();
compilation = loadProjectA.currentPackage().getCompilation();
dependencyGraph = compilation.getResolution().dependencyGraph();
for (ResolvedPackageDependency graphNode : dependencyGraph.getNodes()) {
PackageManifest manifest = graphNode.packageInstance().manifest();
if (manifest.name().value().equals("projectB")) {
Assert.fail("ProjectB found in dependency after editing ProjectA");
}
}
FileUtils.deleteDirectory(projectBCachePath.toFile());
depCompileResult = BCompileUtil.compileAndCacheBala("projects_for_resolution_tests/projectB2");
if (depCompileResult.getErrorCount() > 0) {
Assert.fail("Package B contains compilations error");
}
String oldMainProjectAContent = "import samjs/projectB;\n" + "\n" + "public function getHello() returns " +
"(string) {\n" + " return projectB:hello();\n" + "}";
Module oldModuleProjectA = loadProjectA.currentPackage().getDefaultModule();
mainDocumentId = defaultModuleProjectA.documentIds()
.stream()
.filter(documentId -> oldModuleProjectA.document(documentId).name().equals("main.bal"))
.findFirst();
if (mainDocumentId.isEmpty()) {
Assert.fail("Failed to retrieve the document ID");
}
document = defaultModuleProjectA.document(mainDocumentId.get());
document.modify().withContent(oldMainProjectAContent).apply();
compilation = loadProjectA.currentPackage().getCompilation();
Assert.assertFalse(compilation.diagnosticResult().errorCount() == 0, "Package A has compiled successfully " +
"when it should fail");
} | public void testClearingEnvironmentCache() throws IOException {
if (isWindows()) {
throw new SkipException("Skipping tests on Windows");
}
Path projectBCachePath = testBuildDirectory.resolve("repo").resolve("cache").resolve("samjs").resolve(
"projectB");
FileUtils.deleteDirectory(projectBCachePath.toFile());
CompileResult depCompileResult = BCompileUtil.compileAndCacheBala("projects_for_resolution_tests/projectB1");
if (depCompileResult.getErrorCount() > 0) {
Assert.fail("Package B contains compilations error");
}
Path projectA = RESOURCE_DIRECTORY.resolve("projectA");
Project loadProjectA = TestUtils.loadBuildProject(projectA);
PackageCompilation compilation = loadProjectA.currentPackage().getCompilation();
DependencyGraph<ResolvedPackageDependency> dependencyGraph = compilation.getResolution().dependencyGraph();
for (ResolvedPackageDependency graphNode : dependencyGraph.getNodes()) {
PackageManifest manifest = graphNode.packageInstance().manifest();
if (manifest.name().value().equals("projectB")) {
Assert.assertEquals(manifest.version().toString(), "1.0.0");
}
}
String newMainProjectAContent = "";
Module defaultModuleProjectA = loadProjectA.currentPackage().getDefaultModule();
Optional<DocumentId> mainDocumentId =
defaultModuleProjectA.documentIds()
.stream()
.filter(documentId -> defaultModuleProjectA.document(documentId).name().equals("main.bal"))
.findFirst();
if (mainDocumentId.isEmpty()) {
Assert.fail("Failed to retrieve the document ID");
}
Document document = defaultModuleProjectA.document(mainDocumentId.get());
document.modify().withContent(newMainProjectAContent).apply();
compilation = loadProjectA.currentPackage().getCompilation();
dependencyGraph = compilation.getResolution().dependencyGraph();
for (ResolvedPackageDependency graphNode : dependencyGraph.getNodes()) {
PackageManifest manifest = graphNode.packageInstance().manifest();
if (manifest.name().value().equals("projectB")) {
Assert.fail("ProjectB found in dependency after editing ProjectA");
}
}
FileUtils.deleteDirectory(projectBCachePath.toFile());
depCompileResult = BCompileUtil.compileAndCacheBala("projects_for_resolution_tests/projectB2");
if (depCompileResult.getErrorCount() > 0) {
Assert.fail("Package B contains compilations error");
}
FileUtils.deleteDirectory(projectBCachePath.toFile());
String oldMainProjectAContent = "import samjs/projectB;\n" + "\n" + "public function getHello() returns " +
"(string) {\n" + " return projectB:hello();\n" + "}";
Module oldModuleProjectA = loadProjectA.currentPackage().getDefaultModule();
mainDocumentId = defaultModuleProjectA.documentIds()
.stream()
.filter(documentId -> oldModuleProjectA.document(documentId).name().equals("main.bal"))
.findFirst();
if (mainDocumentId.isEmpty()) {
Assert.fail("Failed to retrieve the document ID");
}
document = defaultModuleProjectA.document(mainDocumentId.get());
document.modify().withContent(oldMainProjectAContent).apply();
compilation = loadProjectA.currentPackage().getCompilation();
Assert.assertNotEquals(compilation.diagnosticResult().errorCount(), 0, "Package A has compiled successfully " +
"when it should fail");
} | class PackageResolutionTests extends BaseTest {
private static final Path RESOURCE_DIRECTORY = Paths.get(
"src/test/resources/projects_for_resolution_tests").toAbsolutePath();
private static final Path testBuildDirectory = Paths.get("build").toAbsolutePath();
@BeforeTest
public void setup() throws IOException {
cacheDependencyToLocalRepo(RESOURCE_DIRECTORY.resolve("package_c_with_pkg_private_function"));
}
@Test(description = "tests resolution with zero direct dependencies")
public void testProjectWithZeroDependencies() {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_c");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 0, "Unexpected compilation diagnostics");
Assert.assertEquals(buildProject.currentPackage().packageDependencies().size(), 0,
"Unexpected number of dependencies");
}
@Test(description = "tests resolution with one direct dependency")
public void testProjectWithOneDependency() {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_b");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 0, "Unexpected compilation diagnostics");
Assert.assertEquals(buildProject.currentPackage().packageDependencies().size(), 1,
"Unexpected number of dependencies");
}
@Test(description = "tests resolution with invalid build file")
public void testProjectWithInvalidBuildFile() throws IOException {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_n");
BCompileUtil.compileAndCacheBala("projects_for_resolution_tests/package_o_1_0_0");
BCompileUtil.compileAndCacheBala("projects_for_resolution_tests/package_o_1_0_2");
BuildOptions.BuildOptionsBuilder buildOptionsBuilder = BuildOptions.builder().setExperimental(true);
buildOptionsBuilder.setSticky(false);
BuildOptions buildOptions = buildOptionsBuilder.build();
Project loadProject = TestUtils.loadBuildProject(projectDirPath, buildOptions);
if (loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME).toFile().exists()) {
TestUtils.deleteDirectory(loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME).toFile());
}
Files.createDirectory(loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME));
Files.createFile(loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME)
.resolve(ProjectConstants.BUILD_FILE));
PackageCompilation compilation = loadProject.currentPackage().getCompilation();
DependencyGraph<ResolvedPackageDependency> dependencyGraph = compilation.getResolution().dependencyGraph();
for (ResolvedPackageDependency graphNode : dependencyGraph.getNodes()) {
Collection<ResolvedPackageDependency> directDeps = dependencyGraph.getDirectDependencies(graphNode);
PackageManifest manifest = graphNode.packageInstance().manifest();
if (manifest.name().value().equals("package_o")) {
Assert.assertEquals(manifest.version().toString(), "1.0.2");
}
}
}
@Test(dependsOnMethods = "testProjectWithInvalidBuildFile", description = "tests project with empty build file")
public void testProjectSaveWithEmptyBuildFile() throws IOException {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_n");
Project loadProject = TestUtils.loadBuildProject(projectDirPath);
Path buildPath = loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME)
.resolve(ProjectConstants.BUILD_FILE);
Files.deleteIfExists(buildPath);
Files.createFile(buildPath);
loadProject.save();
Assert.assertTrue(Files.exists(buildPath));
BuildJson buildJson = ProjectUtils.readBuildJson(buildPath);
Assert.assertFalse(buildJson.isExpiredLastUpdateTime());
}
@Test(dependsOnMethods = "testProjectSaveWithEmptyBuildFile", description = "tests project with empty build file")
public void testProjectSaveWithNewlineBuildFile() throws IOException {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_n");
Project loadProject = TestUtils.loadBuildProject(projectDirPath);
Path buildPath = loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME)
.resolve(ProjectConstants.BUILD_FILE);
Files.deleteIfExists(buildPath);
Files.createFile(buildPath);
Files.write(buildPath, "\n".getBytes());
loadProject.save();
Assert.assertTrue(Files.exists(buildPath));
BuildJson buildJson = ProjectUtils.readBuildJson(buildPath);
Assert.assertFalse(buildJson.isExpiredLastUpdateTime());
}
@Test(dependsOnMethods = "testProjectSaveWithNewlineBuildFile",
description = "tests project with corrupt build file")
public void testProjectSaveWithCorruptBuildFile() throws IOException {
if (isWindows()) {
throw new SkipException("Skipping tests on Windows");
}
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_n");
Project loadProject = TestUtils.loadBuildProject(projectDirPath);
Path buildPath = loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME)
.resolve(ProjectConstants.BUILD_FILE);
Files.deleteIfExists(buildPath);
Files.createFile(buildPath);
Files.writeString(buildPath, "Invalid");
loadProject.save();
Assert.assertTrue(Files.exists(buildPath));
BuildJson buildJson = ProjectUtils.readBuildJson(buildPath);
Assert.assertFalse(buildJson.isExpiredLastUpdateTime());
}
@Test(dependsOnMethods = "testProjectSaveWithCorruptBuildFile", description = "tests project with no read " +
"permissions")
public void testProjectSaveWithNoReadPermission() throws IOException {
if (isWindows()) {
throw new SkipException("Skipping tests on Windows");
}
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_n");
Project loadProject = TestUtils.loadBuildProject(projectDirPath);
Path buildPath = loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME)
.resolve(ProjectConstants.BUILD_FILE);
boolean readable = buildPath.toFile().setReadable(false, false);
if (!readable) {
Assert.fail("could not set readable permission");
}
loadProject.save();
PackageCompilation compilation = loadProject.currentPackage().getCompilation();
Assert.assertTrue(Files.exists(buildPath));
readable = buildPath.toFile().setReadable(true, true);
if (!readable) {
Assert.fail("could not set readable permission");
}
BuildJson buildJson = ProjectUtils.readBuildJson(buildPath);
Assert.assertFalse(buildJson.isExpiredLastUpdateTime());
}
@Test(dependsOnMethods = "testProjectSaveWithNoReadPermission", description = "tests project with no write " +
"permissions")
public void testProjectSaveWithNoWritePermission() throws IOException {
if (isWindows()) {
throw new SkipException("Skipping tests on Windows");
}
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_n");
Project loadProject = TestUtils.loadBuildProject(projectDirPath);
Path buildPath = loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME)
.resolve(ProjectConstants.BUILD_FILE);
boolean writable = buildPath.toFile().setWritable(false, false);
if (!writable) {
Assert.fail("could not set writable permission");
}
loadProject.save();
PackageCompilation compilation = loadProject.currentPackage().getCompilation();
Assert.assertTrue(Files.exists(buildPath));
BuildJson buildJson = ProjectUtils.readBuildJson(buildPath);
Assert.assertFalse(buildJson.isExpiredLastUpdateTime());
}
@Test(description = "tests resolution with one transitive dependency")
public void testProjectWithOneTransitiveDependency() {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_a");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 0, "Unexpected compilation diagnostics");
Assert.assertEquals(buildProject.currentPackage().packageDependencies().size(), 1,
"Unexpected number of dependencies");
}
@Test(description = "tests resolution with two direct dependencies and one transitive")
public void testProjectWithTwoDirectDependencies() {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_d");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 0, "Unexpected compilation diagnostics");
Assert.assertEquals(buildProject.currentPackage().packageDependencies().size(), 2,
"Unexpected number of dependencies");
}
@Test(description = "tests resolution with one transitive dependency",
expectedExceptions = ProjectException.class,
expectedExceptionsMessageRegExp = "Transitive dependency cannot be found: " +
"org=samjs, package=package_missing, version=1.0.0", enabled = false)
public void testProjectWithMissingTransitiveDependency() throws IOException {
Path balaPath = RESOURCE_DIRECTORY.resolve("balas").resolve("missing_transitive_deps")
.resolve("samjs-package_kk-any-1.0.0.bala");
BCompileUtil.copyBalaToDistRepository(balaPath, "samjs", "package_kk", "1.0.0");
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_missing_transitive_dep");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
buildProject.currentPackage().getResolution();
}
@Test(description = "Test dependencies should not be stored in bala archive")
public void testProjectWithTransitiveTestDependencies() throws IOException {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_with_test_dependency");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DependencyGraph<ResolvedPackageDependency> depGraphOfSrcProject =
compilation.getResolution().dependencyGraph();
Assert.assertEquals(depGraphOfSrcProject.getNodes().size(), 2);
JBallerinaBackend jBallerinaBackend = JBallerinaBackend.from(compilation, JvmTarget.JAVA_11);
DiagnosticResult diagnosticResult = jBallerinaBackend.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 0, "Unexpected compilation diagnostics");
String balaName = ProjectUtils.getBalaName(buildProject.currentPackage().manifest());
Path balaDir = testBuildDirectory.resolve("test_gen_balas");
Path balaPath = balaDir.resolve(balaName);
Files.createDirectories(balaDir);
jBallerinaBackend.emit(JBallerinaBackend.OutputType.BALA, balaDir);
BalaProject balaProject = BalaProject.loadProject(BCompileUtil.getTestProjectEnvironmentBuilder(), balaPath);
PackageResolution resolution = balaProject.currentPackage().getResolution();
DependencyGraph<ResolvedPackageDependency> depGraphOfBala = resolution.dependencyGraph();
Assert.assertEquals(depGraphOfBala.getNodes().size(), 1);
}
@Test(description = "Ultimate test case", enabled = false)
public void testProjectWithManyDependencies() {
BCompileUtil.compileAndCacheBala(
"projects_for_resolution_tests/ultimate_package_resolution/package_runtime");
BCompileUtil.compileAndCacheBala(
"projects_for_resolution_tests/ultimate_package_resolution/package_jsonutils");
BCompileUtil.compileAndCacheBala(
"projects_for_resolution_tests/ultimate_package_resolution/package_io_1_4_2");
BCompileUtil.compileAndCacheBala(
"projects_for_resolution_tests/ultimate_package_resolution/package_io_1_5_0");
BCompileUtil.compileAndCacheBala(
"projects_for_resolution_tests/ultimate_package_resolution/package_cache");
OperatingSystemMXBean os = ManagementFactory.getOperatingSystemMXBean();
long initialOpenCount = 0;
if (os instanceof UnixOperatingSystemMXBean) {
UnixOperatingSystemMXBean unixOperatingSystemMXBean = (UnixOperatingSystemMXBean) os;
initialOpenCount = unixOperatingSystemMXBean.getOpenFileDescriptorCount();
}
Project project = BCompileUtil.loadProject(
"projects_for_resolution_tests/ultimate_package_resolution/package_http");
PackageCompilation compilation = project.currentPackage().getCompilation();
JBallerinaBackend jBallerinaBackend = JBallerinaBackend.from(compilation, JvmTarget.JAVA_11);
DiagnosticResult diagnosticResult = jBallerinaBackend.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 0, "Unexpected compilation diagnostics");
if (os instanceof UnixOperatingSystemMXBean) {
UnixOperatingSystemMXBean unixOperatingSystemMXBean = (UnixOperatingSystemMXBean) os;
Assert.assertEquals(initialOpenCount, unixOperatingSystemMXBean.getOpenFileDescriptorCount());
}
Package currentPkg = project.currentPackage();
Assert.assertEquals(currentPkg.packageDependencies().size(), 3);
DependencyGraph<ResolvedPackageDependency> dependencyGraph = compilation.getResolution().dependencyGraph();
for (ResolvedPackageDependency graphNode : dependencyGraph.getNodes()) {
Collection<ResolvedPackageDependency> directDeps = dependencyGraph.getDirectDependencies(graphNode);
PackageManifest manifest = graphNode.packageInstance().manifest();
switch (manifest.name().value()) {
case "io":
Assert.assertEquals(manifest.version().toString(), "1.5.0");
break;
case "http":
Assert.assertEquals(directDeps.size(), 3);
break;
case "cache":
Assert.assertEquals(directDeps.size(), 1);
break;
case "jsonutils":
Assert.assertEquals(graphNode.scope(), PackageDependencyScope.TEST_ONLY);
break;
default:
throw new IllegalStateException("Unexpected dependency");
}
}
}
@Test(description = "tests loading a valid bala project")
public void testBalaProjectDependencyResolution() {
Path balaPath = getBalaPath("samjs", "package_b", "0.1.0");
ProjectEnvironmentBuilder defaultBuilder = ProjectEnvironmentBuilder.getDefaultBuilder();
defaultBuilder.addCompilationCacheFactory(TempDirCompilationCache::from);
BalaProject balaProject = BalaProject.loadProject(defaultBuilder, balaPath);
PackageResolution resolution = balaProject.currentPackage().getResolution();
DependencyGraph<ResolvedPackageDependency> dependencyGraph = resolution.dependencyGraph();
List<ResolvedPackageDependency> nodeInGraph = dependencyGraph.toTopologicallySortedList();
Assert.assertEquals(nodeInGraph.size(), 2);
}
@Test(enabled = false, dependsOnMethods = "testResolveDependencyFromUnsupportedCustomRepo")
public void testResolveDependencyFromCustomRepo() {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_b");
String dependencyContent = "[[dependency]]\n" +
"org = \"samjs\"\n" +
"name = \"package_c\"\n" +
"version = \"0.1.0\"\n" +
"repository = \"local\"";
Environment environment = EnvironmentBuilder.getBuilder().setUserHome(USER_HOME).build();
ProjectEnvironmentBuilder projectEnvironmentBuilder = ProjectEnvironmentBuilder.getBuilder(environment);
BuildProject project = TestUtils.loadBuildProject(projectEnvironmentBuilder, projectDirPath);
project.currentPackage().dependenciesToml().orElseThrow().modify().withContent(dependencyContent).apply();
PackageCompilation compilation = project.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
Assert.assertEquals(diagnosticResult.errorCount(), 2);
}
@Test (enabled = false)
public void testResolveDependencyFromUnsupportedCustomRepo() {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_b");
String dependencyContent = "[[dependency]]\n" +
"org = \"samjs\"\n" +
"name = \"package_c\"\n" +
"version = \"0.1.0\"\n" +
"repository = \"stdlib.local\"";
Environment environment = EnvironmentBuilder.getBuilder().setUserHome(USER_HOME).build();
ProjectEnvironmentBuilder projectEnvironmentBuilder = ProjectEnvironmentBuilder.getBuilder(environment);
BuildProject project = TestUtils.loadBuildProject(projectEnvironmentBuilder, projectDirPath);
project.currentPackage().dependenciesToml().get().modify().withContent(dependencyContent).apply();
PackageCompilation compilation = project.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
Assert.assertEquals(diagnosticResult.errorCount(), 3);
List<String> diagnosticMsgs = diagnosticResult.errors().stream()
.map(Diagnostic::message).collect(Collectors.toList());
Assert.assertTrue(diagnosticMsgs.contains("cannot resolve module 'samjs/package_c.mod_c1 as mod_c1'"));
}
@Test(description = "tests resolution with invalid bala dependency", enabled = false)
public void testProjectWithInvalidBalaDependency() throws IOException {
Path balaPath = RESOURCE_DIRECTORY.resolve("balas").resolve("invalid")
.resolve("bash-soap-any-0.1.0.bala");
BCompileUtil.copyBalaToDistRepository(balaPath, "bash", "soap", "0.1.0");
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_x_with_invalid_bala_dep");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 6, "Unexpected compilation diagnostics");
Iterator<Diagnostic> diagnosticIterator = diagnosticResult.diagnostics().iterator();
Assert.assertTrue(diagnosticIterator.next().toString().contains("invalid bala file:"));
Assert.assertTrue(diagnosticIterator.next().toString().contains("invalid bala file:"));
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [bar.bal:(3:1,3:18)] cannot resolve module 'bash/soap'");
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [bar.bal:(6:1,6:1)] missing semicolon token");
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [foo.bal:(1:1,1:18)] cannot resolve module 'bash/soap'");
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [foo.bal:(5:1,5:1)] missing semicolon token");
}
@Test(description = "tests resolution with invalid transitive bala dependency", enabled = false)
public void testProjectWithInvalidTransitiveBalaDependency() throws IOException {
Path zipBalaPath = RESOURCE_DIRECTORY.resolve("balas").resolve("invalid")
.resolve("zip-2020r1-java8-1.0.4.balo");
BCompileUtil.copyBalaToDistRepository(zipBalaPath, "hemikak", "zip", "1.0.4");
Path helloBalaPath = RESOURCE_DIRECTORY.resolve("balas").resolve("invalid")
.resolve("hello-2020r1-any-0.1.0.balo");
BCompileUtil.copyBalaToDistRepository(helloBalaPath, "bache", "hello", "0.1.0");
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_xx_with_invalid_transitive_bala_dep");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 4, "Unexpected compilation diagnostics");
Iterator<Diagnostic> diagnosticIterator = diagnosticResult.diagnostics().iterator();
Assert.assertTrue(diagnosticIterator.next().toString().contains(
"ERROR [foo.bal:(1:1,1:20)] invalid bala file:"));
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [foo.bal:(1:1,1:20)] cannot resolve module 'bache/hello'");
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [foo.bal:(4:20,4:39)] undefined function 'zip'");
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [foo.bal:(4:20,4:39)] undefined module 'hello'");
}
@Test(description = "tests package name resolution response")
public void testPackageNameResolution() {
DefaultPackageResolver mockResolver = mock(DefaultPackageResolver.class);
List<ImportModuleRequest> moduleRequests = new ArrayList<>();
moduleRequests.add(new ImportModuleRequest(PackageOrg.from("ballerina"), "java.arrays"));
moduleRequests.add(new ImportModuleRequest(PackageOrg.from("ballerina"), "sample.module"));
List<ImportModuleResponse> moduleResponse = new ArrayList<>();
for (ImportModuleRequest request : moduleRequests) {
String[] parts = request.moduleName().split("[.]");
moduleResponse.add(new ImportModuleResponse(
PackageDescriptor.from(request.packageOrg(), PackageName.from(parts[0])), request));
}
when(mockResolver.resolvePackageNames(any(), any(ResolutionOptions.class))).thenReturn(moduleResponse);
Assert.assertEquals(mockResolver.resolvePackageNames(moduleRequests,
ResolutionOptions.builder().build()).size(), 2);
}
@Test(description = "tests resolution for dependency given in Ballerina.toml without repository")
public void testPackageResolutionOfDependencyMissingRepository() {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_y_having_dependency_missing_repo");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 4, "Unexpected compilation diagnostics");
Iterator<Diagnostic> diagnosticIterator = diagnosticResult.diagnostics().iterator();
Assert.assertTrue(diagnosticIterator.next().toString().contains(
"ERROR [Ballerina.toml:(6:1,9:18)] 'repository' under [[dependency]] is missing"));
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [fee.bal:(1:1,1:16)] cannot resolve module 'ccc/ddd'");
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [fee.bal:(4:2,4:27)] undefined function 'notExistingFunction'");
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [fee.bal:(4:2,4:27)] undefined module 'ddd'");
}
} | class PackageResolutionTests extends BaseTest {
private static final Path RESOURCE_DIRECTORY = Paths.get(
"src/test/resources/projects_for_resolution_tests").toAbsolutePath();
private static final Path testBuildDirectory = Paths.get("build").toAbsolutePath();
@BeforeTest
public void setup() throws IOException {
cacheDependencyToLocalRepo(RESOURCE_DIRECTORY.resolve("package_c_with_pkg_private_function"));
}
@Test(description = "tests resolution with zero direct dependencies")
public void testProjectWithZeroDependencies() {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_c");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 0, "Unexpected compilation diagnostics");
Assert.assertEquals(buildProject.currentPackage().packageDependencies().size(), 0,
"Unexpected number of dependencies");
}
@Test(description = "tests resolution with one direct dependency")
public void testProjectWithOneDependency() {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_b");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 0, "Unexpected compilation diagnostics");
Assert.assertEquals(buildProject.currentPackage().packageDependencies().size(), 1,
"Unexpected number of dependencies");
}
@Test(description = "tests resolution with invalid build file")
public void testProjectWithInvalidBuildFile() throws IOException {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_n");
BCompileUtil.compileAndCacheBala("projects_for_resolution_tests/package_o_1_0_0");
BCompileUtil.compileAndCacheBala("projects_for_resolution_tests/package_o_1_0_2");
BuildOptions.BuildOptionsBuilder buildOptionsBuilder = BuildOptions.builder().setExperimental(true);
buildOptionsBuilder.setSticky(false);
BuildOptions buildOptions = buildOptionsBuilder.build();
Project loadProject = TestUtils.loadBuildProject(projectDirPath, buildOptions);
if (loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME).toFile().exists()) {
TestUtils.deleteDirectory(loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME).toFile());
}
Files.createDirectory(loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME));
Files.createFile(loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME)
.resolve(ProjectConstants.BUILD_FILE));
PackageCompilation compilation = loadProject.currentPackage().getCompilation();
DependencyGraph<ResolvedPackageDependency> dependencyGraph = compilation.getResolution().dependencyGraph();
for (ResolvedPackageDependency graphNode : dependencyGraph.getNodes()) {
Collection<ResolvedPackageDependency> directDeps = dependencyGraph.getDirectDependencies(graphNode);
PackageManifest manifest = graphNode.packageInstance().manifest();
if (manifest.name().value().equals("package_o")) {
Assert.assertEquals(manifest.version().toString(), "1.0.2");
}
}
}
@Test(dependsOnMethods = "testProjectWithInvalidBuildFile", description = "tests project with empty build file")
public void testProjectSaveWithEmptyBuildFile() throws IOException {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_n");
Project loadProject = TestUtils.loadBuildProject(projectDirPath);
Path buildPath = loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME)
.resolve(ProjectConstants.BUILD_FILE);
Files.deleteIfExists(buildPath);
Files.createFile(buildPath);
loadProject.save();
Assert.assertTrue(Files.exists(buildPath));
BuildJson buildJson = ProjectUtils.readBuildJson(buildPath);
Assert.assertFalse(buildJson.isExpiredLastUpdateTime());
}
@Test(dependsOnMethods = "testProjectSaveWithEmptyBuildFile", description = "tests project with empty build file")
public void testProjectSaveWithNewlineBuildFile() throws IOException {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_n");
Project loadProject = TestUtils.loadBuildProject(projectDirPath);
Path buildPath = loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME)
.resolve(ProjectConstants.BUILD_FILE);
Files.deleteIfExists(buildPath);
Files.createFile(buildPath);
Files.write(buildPath, "\n".getBytes());
loadProject.save();
Assert.assertTrue(Files.exists(buildPath));
BuildJson buildJson = ProjectUtils.readBuildJson(buildPath);
Assert.assertFalse(buildJson.isExpiredLastUpdateTime());
}
@Test(dependsOnMethods = "testProjectSaveWithNewlineBuildFile",
description = "tests project with corrupt build file")
public void testProjectSaveWithCorruptBuildFile() throws IOException {
if (isWindows()) {
throw new SkipException("Skipping tests on Windows");
}
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_n");
Project loadProject = TestUtils.loadBuildProject(projectDirPath);
Path buildPath = loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME)
.resolve(ProjectConstants.BUILD_FILE);
Files.deleteIfExists(buildPath);
Files.createFile(buildPath);
Files.writeString(buildPath, "Invalid");
loadProject.save();
Assert.assertTrue(Files.exists(buildPath));
BuildJson buildJson = ProjectUtils.readBuildJson(buildPath);
Assert.assertFalse(buildJson.isExpiredLastUpdateTime());
}
@Test(dependsOnMethods = "testProjectSaveWithCorruptBuildFile", description = "tests project with no read " +
"permissions")
public void testProjectSaveWithNoReadPermission() throws IOException {
if (isWindows()) {
throw new SkipException("Skipping tests on Windows");
}
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_n");
Project loadProject = TestUtils.loadBuildProject(projectDirPath);
Path buildPath = loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME)
.resolve(ProjectConstants.BUILD_FILE);
boolean readable = buildPath.toFile().setReadable(false, false);
if (!readable) {
Assert.fail("could not set readable permission");
}
loadProject.save();
PackageCompilation compilation = loadProject.currentPackage().getCompilation();
Assert.assertTrue(Files.exists(buildPath));
readable = buildPath.toFile().setReadable(true, true);
if (!readable) {
Assert.fail("could not set readable permission");
}
BuildJson buildJson = ProjectUtils.readBuildJson(buildPath);
Assert.assertFalse(buildJson.isExpiredLastUpdateTime());
}
@Test(dependsOnMethods = "testProjectSaveWithNoReadPermission", description = "tests project with no write " +
"permissions")
public void testProjectSaveWithNoWritePermission() throws IOException {
if (isWindows()) {
throw new SkipException("Skipping tests on Windows");
}
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_n");
Project loadProject = TestUtils.loadBuildProject(projectDirPath);
Path buildPath = loadProject.sourceRoot().resolve(ProjectConstants.TARGET_DIR_NAME)
.resolve(ProjectConstants.BUILD_FILE);
boolean writable = buildPath.toFile().setWritable(false, false);
if (!writable) {
Assert.fail("could not set writable permission");
}
loadProject.save();
PackageCompilation compilation = loadProject.currentPackage().getCompilation();
Assert.assertTrue(Files.exists(buildPath));
BuildJson buildJson = ProjectUtils.readBuildJson(buildPath);
Assert.assertFalse(buildJson.isExpiredLastUpdateTime());
}
@Test(description = "tests resolution with one transitive dependency")
public void testProjectWithOneTransitiveDependency() {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_a");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 0, "Unexpected compilation diagnostics");
Assert.assertEquals(buildProject.currentPackage().packageDependencies().size(), 1,
"Unexpected number of dependencies");
}
@Test(description = "tests resolution with two direct dependencies and one transitive")
public void testProjectWithTwoDirectDependencies() {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_d");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 0, "Unexpected compilation diagnostics");
Assert.assertEquals(buildProject.currentPackage().packageDependencies().size(), 2,
"Unexpected number of dependencies");
}
@Test(description = "tests resolution with one transitive dependency",
expectedExceptions = ProjectException.class,
expectedExceptionsMessageRegExp = "Transitive dependency cannot be found: " +
"org=samjs, package=package_missing, version=1.0.0", enabled = false)
public void testProjectWithMissingTransitiveDependency() throws IOException {
Path balaPath = RESOURCE_DIRECTORY.resolve("balas").resolve("missing_transitive_deps")
.resolve("samjs-package_kk-any-1.0.0.bala");
BCompileUtil.copyBalaToDistRepository(balaPath, "samjs", "package_kk", "1.0.0");
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_missing_transitive_dep");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
buildProject.currentPackage().getResolution();
}
@Test(description = "Test dependencies should not be stored in bala archive")
public void testProjectWithTransitiveTestDependencies() throws IOException {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_with_test_dependency");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DependencyGraph<ResolvedPackageDependency> depGraphOfSrcProject =
compilation.getResolution().dependencyGraph();
Assert.assertEquals(depGraphOfSrcProject.getNodes().size(), 2);
JBallerinaBackend jBallerinaBackend = JBallerinaBackend.from(compilation, JvmTarget.JAVA_11);
DiagnosticResult diagnosticResult = jBallerinaBackend.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 0, "Unexpected compilation diagnostics");
String balaName = ProjectUtils.getBalaName(buildProject.currentPackage().manifest());
Path balaDir = testBuildDirectory.resolve("test_gen_balas");
Path balaPath = balaDir.resolve(balaName);
Files.createDirectories(balaDir);
jBallerinaBackend.emit(JBallerinaBackend.OutputType.BALA, balaDir);
BalaProject balaProject = BalaProject.loadProject(BCompileUtil.getTestProjectEnvironmentBuilder(), balaPath);
PackageResolution resolution = balaProject.currentPackage().getResolution();
DependencyGraph<ResolvedPackageDependency> depGraphOfBala = resolution.dependencyGraph();
Assert.assertEquals(depGraphOfBala.getNodes().size(), 1);
}
@Test(description = "Ultimate test case", enabled = false)
public void testProjectWithManyDependencies() {
BCompileUtil.compileAndCacheBala(
"projects_for_resolution_tests/ultimate_package_resolution/package_runtime");
BCompileUtil.compileAndCacheBala(
"projects_for_resolution_tests/ultimate_package_resolution/package_jsonutils");
BCompileUtil.compileAndCacheBala(
"projects_for_resolution_tests/ultimate_package_resolution/package_io_1_4_2");
BCompileUtil.compileAndCacheBala(
"projects_for_resolution_tests/ultimate_package_resolution/package_io_1_5_0");
BCompileUtil.compileAndCacheBala(
"projects_for_resolution_tests/ultimate_package_resolution/package_cache");
OperatingSystemMXBean os = ManagementFactory.getOperatingSystemMXBean();
long initialOpenCount = 0;
if (os instanceof UnixOperatingSystemMXBean) {
UnixOperatingSystemMXBean unixOperatingSystemMXBean = (UnixOperatingSystemMXBean) os;
initialOpenCount = unixOperatingSystemMXBean.getOpenFileDescriptorCount();
}
Project project = BCompileUtil.loadProject(
"projects_for_resolution_tests/ultimate_package_resolution/package_http");
PackageCompilation compilation = project.currentPackage().getCompilation();
JBallerinaBackend jBallerinaBackend = JBallerinaBackend.from(compilation, JvmTarget.JAVA_11);
DiagnosticResult diagnosticResult = jBallerinaBackend.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 0, "Unexpected compilation diagnostics");
if (os instanceof UnixOperatingSystemMXBean) {
UnixOperatingSystemMXBean unixOperatingSystemMXBean = (UnixOperatingSystemMXBean) os;
Assert.assertEquals(initialOpenCount, unixOperatingSystemMXBean.getOpenFileDescriptorCount());
}
Package currentPkg = project.currentPackage();
Assert.assertEquals(currentPkg.packageDependencies().size(), 3);
DependencyGraph<ResolvedPackageDependency> dependencyGraph = compilation.getResolution().dependencyGraph();
for (ResolvedPackageDependency graphNode : dependencyGraph.getNodes()) {
Collection<ResolvedPackageDependency> directDeps = dependencyGraph.getDirectDependencies(graphNode);
PackageManifest manifest = graphNode.packageInstance().manifest();
switch (manifest.name().value()) {
case "io":
Assert.assertEquals(manifest.version().toString(), "1.5.0");
break;
case "http":
Assert.assertEquals(directDeps.size(), 3);
break;
case "cache":
Assert.assertEquals(directDeps.size(), 1);
break;
case "jsonutils":
Assert.assertEquals(graphNode.scope(), PackageDependencyScope.TEST_ONLY);
break;
default:
throw new IllegalStateException("Unexpected dependency");
}
}
}
@Test(description = "tests loading a valid bala project")
public void testBalaProjectDependencyResolution() {
Path balaPath = getBalaPath("samjs", "package_b", "0.1.0");
ProjectEnvironmentBuilder defaultBuilder = ProjectEnvironmentBuilder.getDefaultBuilder();
defaultBuilder.addCompilationCacheFactory(TempDirCompilationCache::from);
BalaProject balaProject = BalaProject.loadProject(defaultBuilder, balaPath);
PackageResolution resolution = balaProject.currentPackage().getResolution();
DependencyGraph<ResolvedPackageDependency> dependencyGraph = resolution.dependencyGraph();
List<ResolvedPackageDependency> nodeInGraph = dependencyGraph.toTopologicallySortedList();
Assert.assertEquals(nodeInGraph.size(), 2);
}
@Test(enabled = false, dependsOnMethods = "testResolveDependencyFromUnsupportedCustomRepo")
public void testResolveDependencyFromCustomRepo() {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_b");
String dependencyContent = "[[dependency]]\n" +
"org = \"samjs\"\n" +
"name = \"package_c\"\n" +
"version = \"0.1.0\"\n" +
"repository = \"local\"";
Environment environment = EnvironmentBuilder.getBuilder().setUserHome(USER_HOME).build();
ProjectEnvironmentBuilder projectEnvironmentBuilder = ProjectEnvironmentBuilder.getBuilder(environment);
BuildProject project = TestUtils.loadBuildProject(projectEnvironmentBuilder, projectDirPath);
project.currentPackage().dependenciesToml().orElseThrow().modify().withContent(dependencyContent).apply();
PackageCompilation compilation = project.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
Assert.assertEquals(diagnosticResult.errorCount(), 2);
}
@Test (enabled = false)
public void testResolveDependencyFromUnsupportedCustomRepo() {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_b");
String dependencyContent = "[[dependency]]\n" +
"org = \"samjs\"\n" +
"name = \"package_c\"\n" +
"version = \"0.1.0\"\n" +
"repository = \"stdlib.local\"";
Environment environment = EnvironmentBuilder.getBuilder().setUserHome(USER_HOME).build();
ProjectEnvironmentBuilder projectEnvironmentBuilder = ProjectEnvironmentBuilder.getBuilder(environment);
BuildProject project = TestUtils.loadBuildProject(projectEnvironmentBuilder, projectDirPath);
project.currentPackage().dependenciesToml().get().modify().withContent(dependencyContent).apply();
PackageCompilation compilation = project.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
Assert.assertEquals(diagnosticResult.errorCount(), 3);
List<String> diagnosticMsgs = diagnosticResult.errors().stream()
.map(Diagnostic::message).collect(Collectors.toList());
Assert.assertTrue(diagnosticMsgs.contains("cannot resolve module 'samjs/package_c.mod_c1 as mod_c1'"));
}
@Test(description = "tests resolution with invalid bala dependency", enabled = false)
public void testProjectWithInvalidBalaDependency() throws IOException {
Path balaPath = RESOURCE_DIRECTORY.resolve("balas").resolve("invalid")
.resolve("bash-soap-any-0.1.0.bala");
BCompileUtil.copyBalaToDistRepository(balaPath, "bash", "soap", "0.1.0");
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_x_with_invalid_bala_dep");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 6, "Unexpected compilation diagnostics");
Iterator<Diagnostic> diagnosticIterator = diagnosticResult.diagnostics().iterator();
Assert.assertTrue(diagnosticIterator.next().toString().contains("invalid bala file:"));
Assert.assertTrue(diagnosticIterator.next().toString().contains("invalid bala file:"));
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [bar.bal:(3:1,3:18)] cannot resolve module 'bash/soap'");
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [bar.bal:(6:1,6:1)] missing semicolon token");
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [foo.bal:(1:1,1:18)] cannot resolve module 'bash/soap'");
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [foo.bal:(5:1,5:1)] missing semicolon token");
}
@Test(description = "tests resolution with invalid transitive bala dependency", enabled = false)
public void testProjectWithInvalidTransitiveBalaDependency() throws IOException {
Path zipBalaPath = RESOURCE_DIRECTORY.resolve("balas").resolve("invalid")
.resolve("zip-2020r1-java8-1.0.4.balo");
BCompileUtil.copyBalaToDistRepository(zipBalaPath, "hemikak", "zip", "1.0.4");
Path helloBalaPath = RESOURCE_DIRECTORY.resolve("balas").resolve("invalid")
.resolve("hello-2020r1-any-0.1.0.balo");
BCompileUtil.copyBalaToDistRepository(helloBalaPath, "bache", "hello", "0.1.0");
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_xx_with_invalid_transitive_bala_dep");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 4, "Unexpected compilation diagnostics");
Iterator<Diagnostic> diagnosticIterator = diagnosticResult.diagnostics().iterator();
Assert.assertTrue(diagnosticIterator.next().toString().contains(
"ERROR [foo.bal:(1:1,1:20)] invalid bala file:"));
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [foo.bal:(1:1,1:20)] cannot resolve module 'bache/hello'");
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [foo.bal:(4:20,4:39)] undefined function 'zip'");
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [foo.bal:(4:20,4:39)] undefined module 'hello'");
}
@Test(description = "tests package name resolution response")
public void testPackageNameResolution() {
DefaultPackageResolver mockResolver = mock(DefaultPackageResolver.class);
List<ImportModuleRequest> moduleRequests = new ArrayList<>();
moduleRequests.add(new ImportModuleRequest(PackageOrg.from("ballerina"), "java.arrays"));
moduleRequests.add(new ImportModuleRequest(PackageOrg.from("ballerina"), "sample.module"));
List<ImportModuleResponse> moduleResponse = new ArrayList<>();
for (ImportModuleRequest request : moduleRequests) {
String[] parts = request.moduleName().split("[.]");
moduleResponse.add(new ImportModuleResponse(
PackageDescriptor.from(request.packageOrg(), PackageName.from(parts[0])), request));
}
when(mockResolver.resolvePackageNames(any(), any(ResolutionOptions.class))).thenReturn(moduleResponse);
Assert.assertEquals(mockResolver.resolvePackageNames(moduleRequests,
ResolutionOptions.builder().build()).size(), 2);
}
@Test(description = "tests resolution for dependency given in Ballerina.toml without repository")
public void testPackageResolutionOfDependencyMissingRepository() {
Path projectDirPath = RESOURCE_DIRECTORY.resolve("package_y_having_dependency_missing_repo");
BuildProject buildProject = TestUtils.loadBuildProject(projectDirPath);
PackageCompilation compilation = buildProject.currentPackage().getCompilation();
DiagnosticResult diagnosticResult = compilation.diagnosticResult();
diagnosticResult.errors().forEach(OUT::println);
Assert.assertEquals(diagnosticResult.diagnosticCount(), 4, "Unexpected compilation diagnostics");
Iterator<Diagnostic> diagnosticIterator = diagnosticResult.diagnostics().iterator();
Assert.assertTrue(diagnosticIterator.next().toString().contains(
"ERROR [Ballerina.toml:(6:1,9:18)] 'repository' under [[dependency]] is missing"));
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [fee.bal:(1:1,1:16)] cannot resolve module 'ccc/ddd'");
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [fee.bal:(4:2,4:27)] undefined function 'notExistingFunction'");
Assert.assertEquals(diagnosticIterator.next().toString(),
"ERROR [fee.bal:(4:2,4:27)] undefined module 'ddd'");
}
} |
|
I am wondering why this wasn't a problem with `InetSocketAddress`? It was also present in `toString()` and was not initialized in the constructor. | public String toString() {
return "GatewayConnectionConfig{" +
"requestTimeout=" + requestTimeout +
", maxConnectionPoolSize=" + maxConnectionPoolSize +
", idleConnectionTimeout=" + idleConnectionTimeout +
", proxyType=" + proxy.getType() +
", inetSocketProxyAddress=" + proxy.getAddress() +
'}';
} | ", proxyType=" + proxy.getType() + | public String toString() {
String proxyType = proxy != null ? proxy.getType().toString() : null;
String proxyAddress = proxy != null ? proxy.getAddress().toString() : null;
return "GatewayConnectionConfig{" +
"requestTimeout=" + requestTimeout +
", maxConnectionPoolSize=" + maxConnectionPoolSize +
", idleConnectionTimeout=" + idleConnectionTimeout +
", proxyType=" + proxyType +
", inetSocketProxyAddress=" + proxyAddress +
'}';
} | class GatewayConnectionConfig {
private static final Duration DEFAULT_REQUEST_TIMEOUT = Duration.ofSeconds(60);
private static final Duration DEFAULT_IDLE_CONNECTION_TIMEOUT = Duration.ofSeconds(60);
private static final int DEFAULT_MAX_POOL_SIZE = 1000;
private Duration requestTimeout;
private int maxConnectionPoolSize;
private Duration idleConnectionTimeout;
private ProxyOptions proxy;
/**
* Constructor.
*/
public GatewayConnectionConfig() {
this.idleConnectionTimeout = DEFAULT_IDLE_CONNECTION_TIMEOUT;
this.maxConnectionPoolSize = DEFAULT_MAX_POOL_SIZE;
this.requestTimeout = DEFAULT_REQUEST_TIMEOUT;
}
/**
* Gets the default Gateway connection configuration.
*
* @return the default gateway connection configuration.
*/
public static GatewayConnectionConfig getDefaultConfig() {
return new GatewayConnectionConfig();
}
/**
* Gets the request timeout (time to wait for response from network peer).
*
* @return the request timeout duration.
*/
public Duration getRequestTimeout() {
return this.requestTimeout;
}
/**
* Sets the request timeout (time to wait for response from network peer).
* The default is 60 seconds.
*
* @param requestTimeout the request timeout duration.
* @return the {@link GatewayConnectionConfig}.
*/
public GatewayConnectionConfig setRequestTimeout(Duration requestTimeout) {
this.requestTimeout = requestTimeout;
return this;
}
/**
* Gets the value of the connection pool size the client is using.
*
* @return connection pool size.
*/
public int getMaxConnectionPoolSize() {
return this.maxConnectionPoolSize;
}
/**
* Sets the value of the connection pool size, the default
* is 1000.
*
* @param maxConnectionPoolSize The value of the connection pool size.
* @return the {@link GatewayConnectionConfig}.
*/
public GatewayConnectionConfig setMaxConnectionPoolSize(int maxConnectionPoolSize) {
this.maxConnectionPoolSize = maxConnectionPoolSize;
return this;
}
/**
* Gets the value of the timeout for an idle connection, the default is 60
* seconds.
*
* @return Idle connection timeout duration.
*/
public Duration getIdleConnectionTimeout() {
return this.idleConnectionTimeout;
}
/**
* sets the value of the timeout for an idle connection. After that time,
* the connection will be automatically closed.
*
* @param idleConnectionTimeout the duration for an idle connection.
* @return the {@link GatewayConnectionConfig}.
*/
public GatewayConnectionConfig setIdleConnectionTimeout(Duration idleConnectionTimeout) {
this.idleConnectionTimeout = idleConnectionTimeout;
return this;
}
/**
* Gets the proxy options which contain the InetSocketAddress of proxy server.
*
* @return the proxy options.
*/
public ProxyOptions getProxy() {
return this.proxy;
}
/**
* This will create the InetSocketAddress for the proxy server;
* all requests to Cosmos DB will be routed through this address.
*
* @param proxy The proxy server.
* @return the {@link GatewayConnectionConfig}.
*/
public GatewayConnectionConfig setProxy(ProxyOptions proxy) {
if (proxy.getType() != ProxyOptions.Type.HTTP) {
throw new IllegalArgumentException("Proxy type is not supported " + proxy.getType());
}
this.proxy = proxy;
return this;
}
@Override
} | class GatewayConnectionConfig {
private static final Duration DEFAULT_REQUEST_TIMEOUT = Duration.ofSeconds(60);
private static final Duration DEFAULT_IDLE_CONNECTION_TIMEOUT = Duration.ofSeconds(60);
private static final int DEFAULT_MAX_POOL_SIZE = 1000;
private Duration requestTimeout;
private int maxConnectionPoolSize;
private Duration idleConnectionTimeout;
private ProxyOptions proxy;
/**
* Constructor.
*/
public GatewayConnectionConfig() {
this.idleConnectionTimeout = DEFAULT_IDLE_CONNECTION_TIMEOUT;
this.maxConnectionPoolSize = DEFAULT_MAX_POOL_SIZE;
this.requestTimeout = DEFAULT_REQUEST_TIMEOUT;
}
/**
* Gets the default Gateway connection configuration.
*
* @return the default gateway connection configuration.
*/
public static GatewayConnectionConfig getDefaultConfig() {
return new GatewayConnectionConfig();
}
/**
* Gets the request timeout (time to wait for response from network peer).
*
* @return the request timeout duration.
*/
public Duration getRequestTimeout() {
return this.requestTimeout;
}
/**
* Sets the request timeout (time to wait for response from network peer).
* The default is 60 seconds.
*
* @param requestTimeout the request timeout duration.
* @return the {@link GatewayConnectionConfig}.
*/
public GatewayConnectionConfig setRequestTimeout(Duration requestTimeout) {
this.requestTimeout = requestTimeout;
return this;
}
/**
* Gets the value of the connection pool size the client is using.
*
* @return connection pool size.
*/
public int getMaxConnectionPoolSize() {
return this.maxConnectionPoolSize;
}
/**
* Sets the value of the connection pool size, the default
* is 1000.
*
* @param maxConnectionPoolSize The value of the connection pool size.
* @return the {@link GatewayConnectionConfig}.
*/
public GatewayConnectionConfig setMaxConnectionPoolSize(int maxConnectionPoolSize) {
this.maxConnectionPoolSize = maxConnectionPoolSize;
return this;
}
/**
* Gets the value of the timeout for an idle connection, the default is 60
* seconds.
*
* @return Idle connection timeout duration.
*/
public Duration getIdleConnectionTimeout() {
return this.idleConnectionTimeout;
}
/**
* sets the value of the timeout for an idle connection. After that time,
* the connection will be automatically closed.
*
* @param idleConnectionTimeout the duration for an idle connection.
* @return the {@link GatewayConnectionConfig}.
*/
public GatewayConnectionConfig setIdleConnectionTimeout(Duration idleConnectionTimeout) {
this.idleConnectionTimeout = idleConnectionTimeout;
return this;
}
/**
* Gets the proxy options which contain the InetSocketAddress of proxy server.
*
* @return the proxy options.
*/
public ProxyOptions getProxy() {
return this.proxy;
}
/**
* This will create the InetSocketAddress for the proxy server;
* all requests to Cosmos DB will be routed through this address.
*
* @param proxy The proxy server.
* @return the {@link GatewayConnectionConfig}.
*/
public GatewayConnectionConfig setProxy(ProxyOptions proxy) {
if (proxy.getType() != ProxyOptions.Type.HTTP) {
throw new IllegalArgumentException("Only http proxy type is supported.");
}
this.proxy = proxy;
return this;
}
@Override
} |
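Editor's note on the record above (not part of the dataset): the reviewer asks why the earlier uninitialized `InetSocketAddress` field never broke `toString()`. Concatenating a null reference into a `String` goes through `String.valueOf` and prints "null", whereas calling a method such as `proxy.getType()` on a null field throws a `NullPointerException` — which is why the fixed body guards `proxy` first. The sketch below is hypothetical; `ToStringNullDemo` and `ProxyOptionsLike` are made-up names used only to illustrate the difference.
import java.net.InetSocketAddress;
// Hypothetical demo: "x=" + nullRef is safe, nullRef.method() is not.
public class ToStringNullDemo {
    private InetSocketAddress address;   // never initialized, stays null
    private ProxyOptionsLike proxy;      // never initialized, stays null
    // Stand-in for the real ProxyOptions type used in the record above.
    interface ProxyOptionsLike { String getType(); }
    @Override
    public String toString() {
        // Safe: string concatenation converts null via String.valueOf -> "null".
        String safe = "address=" + address;
        // Unsafe without a guard: dereferencing a null field throws NullPointerException.
        String guarded = proxy != null ? ("proxyType=" + proxy.getType()) : "proxyType=null";
        return safe + ", " + guarded;
    }
    public static void main(String[] args) {
        System.out.println(new ToStringNullDemo()); // prints: address=null, proxyType=null
    }
}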
Is it supposed to be `!result.isSetStatus()`? | public String getProxyStatus() {
if (result == null) {
return QueryState.MysqlStateType.UNKNOWN.name();
}
if (result.isSetStatus()) {
return QueryState.MysqlStateType.UNKNOWN.name();
} else {
return result.getStatus();
}
} | if (result.isSetStatus()) { | public String getProxyStatus() {
if (result == null) {
return QueryState.MysqlStateType.UNKNOWN.name();
}
if (!result.isSetStatus()) {
return QueryState.MysqlStateType.UNKNOWN.name();
} else {
return result.getStatus();
}
} | class MasterOpExecutor {
private static final Logger LOG = LogManager.getLogger(MasterOpExecutor.class);
private final OriginStatement originStmt;
private final ConnectContext ctx;
private TMasterOpResult result;
private int waitTimeoutMs;
private int thriftTimeoutMs;
private boolean shouldNotRetry;
public MasterOpExecutor(OriginStatement originStmt, ConnectContext ctx, RedirectStatus status, boolean isQuery) {
this.originStmt = originStmt;
this.ctx = ctx;
if (status.isNeedToWaitJournalSync()) {
this.waitTimeoutMs = ctx.getSessionVariable().getQueryTimeoutS() * 1000;
} else {
this.waitTimeoutMs = 0;
}
this.thriftTimeoutMs = ctx.getSessionVariable().getQueryTimeoutS() * 1000;
this.shouldNotRetry = !isQuery;
}
public void execute() throws Exception {
Span forwardSpan =
ctx.getTracer().spanBuilder("forward").setParent(Context.current())
.startSpan();
try (Scope scope = forwardSpan.makeCurrent()) {
forward();
} catch (Exception e) {
forwardSpan.recordException(e);
throw e;
} finally {
forwardSpan.end();
}
LOG.info("forwarding to master get result max journal id: {}", result.maxJournalId);
ctx.getEnv().getJournalObservable().waitOn(result.maxJournalId, waitTimeoutMs);
}
private void forward() throws Exception {
if (!ctx.getEnv().isReady()) {
throw new Exception("Node catalog is not ready, please wait for a while.");
}
String masterHost = ctx.getEnv().getMasterIp();
int masterRpcPort = ctx.getEnv().getMasterRpcPort();
TNetworkAddress thriftAddress = new TNetworkAddress(masterHost, masterRpcPort);
FrontendService.Client client = null;
try {
client = ClientPool.frontendPool.borrowObject(thriftAddress, thriftTimeoutMs);
} catch (Exception e) {
throw new Exception("Failed to get master client.", e);
}
TMasterOpRequest params = new TMasterOpRequest();
params.setCluster(ctx.getClusterName());
params.setSql(originStmt.originStmt);
params.setStmtIdx(originStmt.idx);
params.setUser(ctx.getQualifiedUser());
params.setDb(ctx.getDatabase());
params.setResourceInfo(ctx.toResourceCtx());
params.setUserIp(ctx.getRemoteIP());
params.setStmtId(ctx.getStmtId());
params.setCurrentUserIdent(ctx.getCurrentUserIdentity().toThrift());
params.setQueryOptions(ctx.getSessionVariable().getQueryOptionVariables());
params.setSessionVariables(ctx.getSessionVariable().getForwardVariables());
Map<String, String> traceCarrier = new HashMap<String, String>();
Telemetry.getOpenTelemetry().getPropagators().getTextMapPropagator()
.inject(Context.current(), traceCarrier, (carrier, key, value) -> carrier.put(key, value));
params.setTraceCarrier(traceCarrier);
if (null != ctx.queryId()) {
params.setQueryId(ctx.queryId());
}
LOG.info("Forward statement {} to Master {}", ctx.getStmtId(), thriftAddress);
boolean isReturnToPool = false;
try {
result = client.forward(params);
isReturnToPool = true;
} catch (TTransportException e) {
boolean ok = ClientPool.frontendPool.reopen(client, thriftTimeoutMs);
if (!ok) {
throw e;
}
if (shouldNotRetry || e.getType() == TTransportException.TIMED_OUT) {
throw e;
} else {
LOG.warn("Forward statement " + ctx.getStmtId() + " to Master " + thriftAddress + " twice", e);
result = client.forward(params);
isReturnToPool = true;
}
} finally {
if (isReturnToPool) {
ClientPool.frontendPool.returnObject(thriftAddress, client);
} else {
ClientPool.frontendPool.invalidateObject(thriftAddress, client);
}
}
}
public ByteBuffer getOutputPacket() {
if (result == null) {
return null;
}
return result.packet;
}
public TUniqueId getQueryId() {
if (result != null && result.isSetQueryId()) {
return result.getQueryId();
} else {
return null;
}
}
public ShowResultSet getProxyResultSet() {
if (result == null) {
return null;
}
if (result.isSetResultSet()) {
return new ShowResultSet(result.resultSet);
} else {
return null;
}
}
public void setResult(TMasterOpResult result) {
this.result = result;
}
} | class MasterOpExecutor {
private static final Logger LOG = LogManager.getLogger(MasterOpExecutor.class);
private final OriginStatement originStmt;
private final ConnectContext ctx;
private TMasterOpResult result;
private int waitTimeoutMs;
private int thriftTimeoutMs;
private boolean shouldNotRetry;
public MasterOpExecutor(OriginStatement originStmt, ConnectContext ctx, RedirectStatus status, boolean isQuery) {
this.originStmt = originStmt;
this.ctx = ctx;
if (status.isNeedToWaitJournalSync()) {
this.waitTimeoutMs = ctx.getSessionVariable().getQueryTimeoutS() * 1000;
} else {
this.waitTimeoutMs = 0;
}
this.thriftTimeoutMs = ctx.getSessionVariable().getQueryTimeoutS() * 1000;
this.shouldNotRetry = !isQuery;
}
public void execute() throws Exception {
Span forwardSpan =
ctx.getTracer().spanBuilder("forward").setParent(Context.current())
.startSpan();
try (Scope scope = forwardSpan.makeCurrent()) {
forward();
} catch (Exception e) {
forwardSpan.recordException(e);
throw e;
} finally {
forwardSpan.end();
}
LOG.info("forwarding to master get result max journal id: {}", result.maxJournalId);
ctx.getEnv().getJournalObservable().waitOn(result.maxJournalId, waitTimeoutMs);
}
private void forward() throws Exception {
if (!ctx.getEnv().isReady()) {
throw new Exception("Node catalog is not ready, please wait for a while.");
}
String masterHost = ctx.getEnv().getMasterIp();
int masterRpcPort = ctx.getEnv().getMasterRpcPort();
TNetworkAddress thriftAddress = new TNetworkAddress(masterHost, masterRpcPort);
FrontendService.Client client = null;
try {
client = ClientPool.frontendPool.borrowObject(thriftAddress, thriftTimeoutMs);
} catch (Exception e) {
throw new Exception("Failed to get master client.", e);
}
TMasterOpRequest params = new TMasterOpRequest();
params.setCluster(ctx.getClusterName());
params.setSql(originStmt.originStmt);
params.setStmtIdx(originStmt.idx);
params.setUser(ctx.getQualifiedUser());
params.setDb(ctx.getDatabase());
params.setResourceInfo(ctx.toResourceCtx());
params.setUserIp(ctx.getRemoteIP());
params.setStmtId(ctx.getStmtId());
params.setCurrentUserIdent(ctx.getCurrentUserIdentity().toThrift());
params.setQueryOptions(ctx.getSessionVariable().getQueryOptionVariables());
params.setSessionVariables(ctx.getSessionVariable().getForwardVariables());
Map<String, String> traceCarrier = new HashMap<String, String>();
Telemetry.getOpenTelemetry().getPropagators().getTextMapPropagator()
.inject(Context.current(), traceCarrier, (carrier, key, value) -> carrier.put(key, value));
params.setTraceCarrier(traceCarrier);
if (null != ctx.queryId()) {
params.setQueryId(ctx.queryId());
}
LOG.info("Forward statement {} to Master {}", ctx.getStmtId(), thriftAddress);
boolean isReturnToPool = false;
try {
result = client.forward(params);
isReturnToPool = true;
} catch (TTransportException e) {
boolean ok = ClientPool.frontendPool.reopen(client, thriftTimeoutMs);
if (!ok) {
throw e;
}
if (shouldNotRetry || e.getType() == TTransportException.TIMED_OUT) {
throw e;
} else {
LOG.warn("Forward statement " + ctx.getStmtId() + " to Master " + thriftAddress + " twice", e);
result = client.forward(params);
isReturnToPool = true;
}
} finally {
if (isReturnToPool) {
ClientPool.frontendPool.returnObject(thriftAddress, client);
} else {
ClientPool.frontendPool.invalidateObject(thriftAddress, client);
}
}
}
public ByteBuffer getOutputPacket() {
if (result == null) {
return null;
}
return result.packet;
}
public TUniqueId getQueryId() {
if (result != null && result.isSetQueryId()) {
return result.getQueryId();
} else {
return null;
}
}
public ShowResultSet getProxyResultSet() {
if (result == null) {
return null;
}
if (result.isSetResultSet()) {
return new ShowResultSet(result.resultSet);
} else {
return null;
}
}
public void setResult(TMasterOpResult result) {
this.result = result;
}
} |
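Editor's note on the record above (a hedged sketch, not part of the dataset): Thrift-generated structs expose an isSetX() check for optional fields, and the original guard returned the UNKNOWN state exactly when the status field *was* set — the reviewer's point is that the condition must be negated. The classes below are hypothetical stand-ins that only illustrate the negated-guard pattern.
// Hypothetical holder mimicking a Thrift struct with an optional 'status' field.
final class ResultLike {
    private final String status; // null means "not set"
    ResultLike(String status) { this.status = status; }
    boolean isSetStatus() { return status != null; }
    String getStatus() { return status; }
}
final class ProxyStatusDemo {
    // Correct pattern: fall back to UNKNOWN only when the field is NOT set.
    static String proxyStatus(ResultLike result) {
        if (result == null || !result.isSetStatus()) {
            return "UNKNOWN";
        }
        return result.getStatus();
    }
    public static void main(String[] args) {
        System.out.println(proxyStatus(null));                 // UNKNOWN
        System.out.println(proxyStatus(new ResultLike(null))); // UNKNOWN
        System.out.println(proxyStatus(new ResultLike("OK"))); // OK
    }
}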
Is it thread-safe to mutate the shared request config builder? | public void dispatch(HttpRequest wrapped, CompletableFuture<HttpResponse> vessel) {
Endpoint leastBusy = endpoints.get(0);
int min = Integer.MAX_VALUE;
int start = ++someNumber % endpoints.size();
for (int i = 0; i < endpoints.size(); i++) {
Endpoint endpoint = endpoints.get((i + start) % endpoints.size());
int inflight = endpoint.inflight.get();
if (inflight < min) {
leastBusy = endpoint;
min = inflight;
}
}
Endpoint endpoint = leastBusy;
endpoint.inflight.incrementAndGet();
dispatchExecutor.execute(() -> {
try {
SimpleHttpRequest request = new SimpleHttpRequest(wrapped.method(), wrapped.path());
request.setScheme(endpoint.url.getScheme());
request.setAuthority(new URIAuthority(endpoint.url.getHost(), portOf(endpoint.url)));
long timeoutMillis = wrapped.timeout() == null ? 190_000 : wrapped.timeout().toMillis() * 11 / 10 + 1_000;
request.setConfig(requestConfig.setResponseTimeout(Timeout.ofMilliseconds(timeoutMillis)).build());
defaultHeaders.forEach(request::setHeader);
wrapped.headers().forEach((name, value) -> request.setHeader(name, value.get()));
if (wrapped.body() != null) {
byte[] body = wrapped.body();
if (compression == gzip || compression == auto && body.length > 512) {
request.setHeader(gzipEncodingHeader);
body = gzipped(body);
}
request.setBody(body, ContentType.APPLICATION_JSON);
}
Future<?> future = endpoint.client.execute(request,
new FutureCallback<SimpleHttpResponse>() {
@Override public void completed(SimpleHttpResponse response) { vessel.complete(new ApacheHttpResponse(response)); }
@Override public void failed(Exception ex) { vessel.completeExceptionally(ex); }
@Override public void cancelled() { vessel.cancel(false); }
});
Future<?> cancellation = timeoutExecutor.schedule(() -> {
future.cancel(true);
vessel.cancel(true);
}, timeoutMillis + 10_000, TimeUnit.MILLISECONDS);
vessel.whenComplete((__, ___) -> cancellation.cancel(true));
}
catch (Throwable thrown) {
vessel.completeExceptionally(thrown);
}
vessel.whenComplete((__, ___) -> endpoint.inflight.decrementAndGet());
});
} | request.setConfig(requestConfig.setResponseTimeout(Timeout.ofMilliseconds(timeoutMillis)).build()); | public void dispatch(HttpRequest wrapped, CompletableFuture<HttpResponse> vessel) {
Endpoint leastBusy = endpoints.get(0);
int min = Integer.MAX_VALUE;
int start = ++someNumber % endpoints.size();
for (int i = 0; i < endpoints.size(); i++) {
Endpoint endpoint = endpoints.get((i + start) % endpoints.size());
int inflight = endpoint.inflight.get();
if (inflight < min) {
leastBusy = endpoint;
min = inflight;
}
}
Endpoint endpoint = leastBusy;
endpoint.inflight.incrementAndGet();
dispatchExecutor.execute(() -> {
try {
SimpleHttpRequest request = new SimpleHttpRequest(wrapped.method(), wrapped.path());
request.setScheme(endpoint.url.getScheme());
request.setAuthority(new URIAuthority(endpoint.url.getHost(), portOf(endpoint.url)));
long timeoutMillis = wrapped.timeout() == null ? 190_000 : wrapped.timeout().toMillis() * 11 / 10 + 1_000;
request.setConfig(RequestConfig.copy(requestConfig).setResponseTimeout(Timeout.ofMilliseconds(timeoutMillis)).build());
defaultHeaders.forEach(request::setHeader);
wrapped.headers().forEach((name, value) -> request.setHeader(name, value.get()));
if (wrapped.body() != null) {
byte[] body = wrapped.body();
if (compression == gzip || compression == auto && body.length > 512) {
request.setHeader(gzipEncodingHeader);
body = gzipped(body);
}
request.setBody(body, ContentType.APPLICATION_JSON);
}
Future<?> future = endpoint.client.execute(request,
new FutureCallback<SimpleHttpResponse>() {
@Override public void completed(SimpleHttpResponse response) { vessel.complete(new ApacheHttpResponse(response)); }
@Override public void failed(Exception ex) { vessel.completeExceptionally(ex); }
@Override public void cancelled() { vessel.cancel(false); }
});
Future<?> cancellation = timeoutExecutor.schedule(() -> { future.cancel(true); vessel.cancel(true); },
timeoutMillis + 10_000,
TimeUnit.MILLISECONDS);
vessel.whenComplete((__, ___) -> cancellation.cancel(true));
}
catch (Throwable thrown) {
vessel.completeExceptionally(thrown);
}
vessel.whenComplete((__, ___) -> endpoint.inflight.decrementAndGet());
});
} | class ApacheCluster implements Cluster {
private final List<Endpoint> endpoints = new ArrayList<>();
private final List<BasicHeader> defaultHeaders = Arrays.asList(new BasicHeader(HttpHeaders.USER_AGENT, String.format("vespa-feed-client/%s", Vespa.VERSION)),
new BasicHeader("Vespa-Client-Version", Vespa.VERSION));
private final Header gzipEncodingHeader = new BasicHeader(HttpHeaders.CONTENT_ENCODING, "gzip");
private final RequestConfig.Builder requestConfig;
private final Compression compression;
private int someNumber = 0;
private final ExecutorService dispatchExecutor = Executors.newFixedThreadPool(8, t -> new Thread(t, "request-dispatch-thread"));
private final ScheduledExecutorService timeoutExecutor = Executors.newSingleThreadScheduledExecutor(t -> new Thread(t, "request-timeout-thread"));
ApacheCluster(FeedClientBuilderImpl builder) throws IOException {
for (int i = 0; i < builder.connectionsPerEndpoint; i++)
for (URI endpoint : builder.endpoints)
endpoints.add(new Endpoint(createHttpClient(builder), endpoint));
this.requestConfig = createRequestConfig(builder);
this.compression = builder.compression;
}
@Override
private byte[] gzipped(byte[] content) throws IOException{
ByteArrayOutputStream buffer = new ByteArrayOutputStream(1 << 10);
try (GZIPOutputStream zip = new GZIPOutputStream(buffer)) {
zip.write(content);
}
return buffer.toByteArray();
}
@Override
public void close() {
Throwable thrown = null;
dispatchExecutor.shutdownNow().forEach(Runnable::run);
for (Endpoint endpoint : endpoints) {
try {
endpoint.client.close();
}
catch (Throwable t) {
if (thrown == null) thrown = t;
else thrown.addSuppressed(t);
}
}
timeoutExecutor.shutdownNow().forEach(Runnable::run);
if (thrown != null) throw new RuntimeException(thrown);
}
private static class Endpoint {
private final CloseableHttpAsyncClient client;
private final AtomicInteger inflight = new AtomicInteger(0);
private final URI url;
private Endpoint(CloseableHttpAsyncClient client, URI url) {
this.client = client;
this.url = url;
this.client.start();
}
}
@SuppressWarnings("deprecation")
private static CloseableHttpAsyncClient createHttpClient(FeedClientBuilderImpl builder) throws IOException {
SSLContext sslContext = builder.constructSslContext();
String[] allowedCiphers = excludeH2Blacklisted(excludeWeak(sslContext.getSupportedSSLParameters().getCipherSuites()));
if (allowedCiphers.length == 0)
throw new IllegalStateException("No adequate SSL cipher suites supported by the JVM");
ClientTlsStrategyBuilder tlsStrategyBuilder = ClientTlsStrategyBuilder.create()
.setTlsDetailsFactory(TlsDetailsFactory::create)
.setCiphers(allowedCiphers)
.setSslContext(sslContext);
if (builder.hostnameVerifier != null)
tlsStrategyBuilder.setHostnameVerifier(builder.hostnameVerifier);
return HttpAsyncClients.createHttp2Minimal(H2Config.custom()
.setMaxConcurrentStreams(builder.maxStreamsPerConnection)
.setCompressionEnabled(true)
.setPushEnabled(false)
.setInitialWindowSize(Integer.MAX_VALUE)
.build(),
IOReactorConfig.custom()
.setIoThreadCount(2)
.setTcpNoDelay(true)
.setSoTimeout(Timeout.ofSeconds(10))
.build(),
tlsStrategyBuilder.build());
}
private static int portOf(URI url) {
return url.getPort() == -1 ? url.getScheme().equals("http") ? 80 : 443
: url.getPort();
}
@SuppressWarnings("deprecation")
private static RequestConfig.Builder createRequestConfig(FeedClientBuilderImpl b) {
RequestConfig.Builder builder = RequestConfig.custom()
.setConnectTimeout(Timeout.ofSeconds(10))
.setConnectionRequestTimeout(Timeout.DISABLED);
if (b.proxy != null) builder.setProxy(new HttpHost(b.proxy.getScheme(), b.proxy.getHost(), b.proxy.getPort()));
return builder;
}
private static class ApacheHttpResponse implements HttpResponse {
private final SimpleHttpResponse wrapped;
private ApacheHttpResponse(SimpleHttpResponse wrapped) {
this.wrapped = wrapped;
}
@Override
public int code() {
return wrapped.getCode();
}
@Override
public byte[] body() {
return wrapped.getBodyBytes();
}
@Override
public String contentType() {
return wrapped.getContentType().getMimeType();
}
@Override
public String toString() {
return "HTTP response with code " + code() +
(body() != null ? " and body '" + wrapped.getBodyText() + "'" : "");
}
}
} | class ApacheCluster implements Cluster {
private final List<Endpoint> endpoints = new ArrayList<>();
private final List<BasicHeader> defaultHeaders = Arrays.asList(new BasicHeader(HttpHeaders.USER_AGENT, String.format("vespa-feed-client/%s", Vespa.VERSION)),
new BasicHeader("Vespa-Client-Version", Vespa.VERSION));
private final Header gzipEncodingHeader = new BasicHeader(HttpHeaders.CONTENT_ENCODING, "gzip");
private final RequestConfig requestConfig;
private final Compression compression;
private int someNumber = 0;
private final ExecutorService dispatchExecutor = Executors.newFixedThreadPool(8, t -> new Thread(t, "request-dispatch-thread"));
private final ScheduledExecutorService timeoutExecutor = Executors.newSingleThreadScheduledExecutor(t -> new Thread(t, "request-timeout-thread"));
ApacheCluster(FeedClientBuilderImpl builder) throws IOException {
for (int i = 0; i < builder.connectionsPerEndpoint; i++)
for (URI endpoint : builder.endpoints)
endpoints.add(new Endpoint(createHttpClient(builder), endpoint));
this.requestConfig = createRequestConfig(builder);
this.compression = builder.compression;
}
@Override
private byte[] gzipped(byte[] content) throws IOException{
ByteArrayOutputStream buffer = new ByteArrayOutputStream(1 << 10);
try (GZIPOutputStream zip = new GZIPOutputStream(buffer)) {
zip.write(content);
}
return buffer.toByteArray();
}
@Override
public void close() {
Throwable thrown = null;
dispatchExecutor.shutdownNow().forEach(Runnable::run);
for (Endpoint endpoint : endpoints) {
try {
endpoint.client.close();
}
catch (Throwable t) {
if (thrown == null) thrown = t;
else thrown.addSuppressed(t);
}
}
timeoutExecutor.shutdownNow().forEach(Runnable::run);
if (thrown != null) throw new RuntimeException(thrown);
}
private static class Endpoint {
private final CloseableHttpAsyncClient client;
private final AtomicInteger inflight = new AtomicInteger(0);
private final URI url;
private Endpoint(CloseableHttpAsyncClient client, URI url) {
this.client = client;
this.url = url;
this.client.start();
}
}
@SuppressWarnings("deprecation")
private static CloseableHttpAsyncClient createHttpClient(FeedClientBuilderImpl builder) throws IOException {
SSLContext sslContext = builder.constructSslContext();
String[] allowedCiphers = excludeH2Blacklisted(excludeWeak(sslContext.getSupportedSSLParameters().getCipherSuites()));
if (allowedCiphers.length == 0)
throw new IllegalStateException("No adequate SSL cipher suites supported by the JVM");
ClientTlsStrategyBuilder tlsStrategyBuilder = ClientTlsStrategyBuilder.create()
.setTlsDetailsFactory(TlsDetailsFactory::create)
.setCiphers(allowedCiphers)
.setSslContext(sslContext);
if (builder.hostnameVerifier != null)
tlsStrategyBuilder.setHostnameVerifier(builder.hostnameVerifier);
return HttpAsyncClients.createHttp2Minimal(H2Config.custom()
.setMaxConcurrentStreams(builder.maxStreamsPerConnection)
.setCompressionEnabled(true)
.setPushEnabled(false)
.setInitialWindowSize(Integer.MAX_VALUE)
.build(),
IOReactorConfig.custom()
.setIoThreadCount(2)
.setTcpNoDelay(true)
.setSoTimeout(Timeout.ofSeconds(10))
.build(),
tlsStrategyBuilder.build());
}
private static int portOf(URI url) {
return url.getPort() == -1 ? url.getScheme().equals("http") ? 80 : 443
: url.getPort();
}
@SuppressWarnings("deprecation")
private static RequestConfig createRequestConfig(FeedClientBuilderImpl b) {
RequestConfig.Builder builder = RequestConfig.custom()
.setConnectTimeout(Timeout.ofSeconds(10))
.setConnectionRequestTimeout(Timeout.DISABLED);
if (b.proxy != null) builder.setProxy(new HttpHost(b.proxy.getScheme(), b.proxy.getHost(), b.proxy.getPort()));
return builder.build();
}
private static class ApacheHttpResponse implements HttpResponse {
private final SimpleHttpResponse wrapped;
private ApacheHttpResponse(SimpleHttpResponse wrapped) {
this.wrapped = wrapped;
}
@Override
public int code() {
return wrapped.getCode();
}
@Override
public byte[] body() {
return wrapped.getBodyBytes();
}
@Override
public String contentType() {
return wrapped.getContentType().getMimeType();
}
@Override
public String toString() {
return "HTTP response with code " + code() +
(body() != null ? " and body '" + wrapped.getBodyText() + "'" : "");
}
}
} |
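Editor's note on the record above (a minimal sketch, not part of the dataset): the reviewer's concern is that a single RequestConfig.Builder was mutated by every dispatch task, so concurrent requests could race on it. Keeping an immutable base RequestConfig and deriving a fresh per-request builder via RequestConfig.copy — the approach the fixed method takes — avoids the shared mutable state. The snippet assumes Apache HttpClient 5 on the classpath; the class name and timeout values are made up for illustration.
import org.apache.hc.client5.http.config.RequestConfig;
import org.apache.hc.core5.util.Timeout;
final class PerRequestConfigDemo {
    // Built once; RequestConfig itself is immutable and safe to share across threads.
    private static final RequestConfig BASE = RequestConfig.custom()
            .setConnectTimeout(Timeout.ofSeconds(10))
            .setConnectionRequestTimeout(Timeout.DISABLED)
            .build();
    // Each request derives its own builder from the immutable base,
    // so no two threads ever touch the same mutable builder instance.
    static RequestConfig forRequest(long responseTimeoutMillis) {
        return RequestConfig.copy(BASE)
                .setResponseTimeout(Timeout.ofMilliseconds(responseTimeoutMillis))
                .build();
    }
    public static void main(String[] args) {
        System.out.println(forRequest(5_000));
    }
}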
@jsquire may know... Do we allow users to set a client identifier for EventProcessorClient? (i.e., does that mean all the new underlying AMQP connections it creates also use that identifier?) | EventHubAsyncClient buildAsyncClient() {
if (retryOptions == null) {
retryOptions = DEFAULT_RETRY;
}
if (scheduler == null) {
scheduler = Schedulers.boundedElastic();
}
if (prefetchCount == null) {
prefetchCount = DEFAULT_PREFETCH_COUNT;
}
final MessageSerializer messageSerializer = new EventHubMessageSerializer();
final EventHubConnectionProcessor processor;
if (isSharedConnection.get()) {
synchronized (connectionLock) {
if (eventHubConnectionProcessor == null) {
eventHubConnectionProcessor = buildConnectionProcessor(messageSerializer);
}
}
processor = eventHubConnectionProcessor;
final int numberOfOpenClients = openClients.incrementAndGet();
LOGGER.info("
} else {
processor = buildConnectionProcessor(messageSerializer);
}
final TracerProvider tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class));
String identifier;
if (clientOptions != null && clientOptions instanceof AmqpClientOptions) {
String clientOptionIdentifier = ((AmqpClientOptions) clientOptions).getIdentifier();
identifier = clientOptionIdentifier == null ? UUID.randomUUID().toString() : clientOptionIdentifier;
} else {
identifier = UUID.randomUUID().toString();
}
return new EventHubAsyncClient(processor, tracerProvider, messageSerializer, scheduler,
isSharedConnection.get(), this::onClientClose,
identifier);
} | } | EventHubAsyncClient buildAsyncClient() {
if (retryOptions == null) {
retryOptions = DEFAULT_RETRY;
}
if (scheduler == null) {
scheduler = Schedulers.boundedElastic();
}
if (prefetchCount == null) {
prefetchCount = DEFAULT_PREFETCH_COUNT;
}
final MessageSerializer messageSerializer = new EventHubMessageSerializer();
final EventHubConnectionProcessor processor;
if (isSharedConnection.get()) {
synchronized (connectionLock) {
if (eventHubConnectionProcessor == null) {
eventHubConnectionProcessor = buildConnectionProcessor(messageSerializer);
}
}
processor = eventHubConnectionProcessor;
final int numberOfOpenClients = openClients.incrementAndGet();
LOGGER.info("
} else {
processor = buildConnectionProcessor(messageSerializer);
}
final TracerProvider tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class));
String identifier;
if (clientOptions instanceof AmqpClientOptions) {
String clientOptionIdentifier = ((AmqpClientOptions) clientOptions).getIdentifier();
identifier = CoreUtils.isNullOrEmpty(clientOptionIdentifier) ? UUID.randomUUID().toString() : clientOptionIdentifier;
} else {
identifier = UUID.randomUUID().toString();
}
return new EventHubAsyncClient(processor, tracerProvider, messageSerializer, scheduler,
isSharedConnection.get(), this::onClientClose,
identifier);
} | class EventHubClientBuilder implements
TokenCredentialTrait<EventHubClientBuilder>,
AzureNamedKeyCredentialTrait<EventHubClientBuilder>,
ConnectionStringTrait<EventHubClientBuilder>,
AzureSasCredentialTrait<EventHubClientBuilder>,
AmqpTrait<EventHubClientBuilder>,
ConfigurationTrait<EventHubClientBuilder> {
static final int DEFAULT_PREFETCH_COUNT = 500;
static final int DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT = 1;
/**
* The name of the default consumer group in the Event Hubs service.
*/
public static final String DEFAULT_CONSUMER_GROUP_NAME = "$Default";
/**
* The minimum value allowed for the prefetch count of the consumer.
*/
private static final int MINIMUM_PREFETCH_COUNT = 1;
/**
* The maximum value allowed for the prefetch count of the consumer.
*/
private static final int MAXIMUM_PREFETCH_COUNT = 8000;
private static final String EVENTHUBS_PROPERTIES_FILE = "azure-messaging-eventhubs.properties";
private static final String NAME_KEY = "name";
private static final String VERSION_KEY = "version";
private static final String UNKNOWN = "UNKNOWN";
private static final String AZURE_EVENT_HUBS_CONNECTION_STRING = "AZURE_EVENT_HUBS_CONNECTION_STRING";
private static final AmqpRetryOptions DEFAULT_RETRY = new AmqpRetryOptions()
.setTryTimeout(ClientConstants.OPERATION_TIMEOUT);
private static final Pattern HOST_PORT_PATTERN = Pattern.compile("^[^:]+:\\d+");
private static final ClientLogger LOGGER = new ClientLogger(EventHubClientBuilder.class);
private final Object connectionLock = new Object();
private final AtomicBoolean isSharedConnection = new AtomicBoolean();
private TokenCredential credentials;
private Configuration configuration;
private ProxyOptions proxyOptions;
private AmqpRetryOptions retryOptions;
private Scheduler scheduler;
private AmqpTransportType transport;
private String fullyQualifiedNamespace;
private String eventHubName;
private String consumerGroup;
private EventHubConnectionProcessor eventHubConnectionProcessor;
private Integer prefetchCount;
private ClientOptions clientOptions;
private SslDomain.VerifyMode verifyMode;
private URL customEndpointAddress;
/**
* Keeps track of the open clients that were created from this builder when there is a shared connection.
*/
private final AtomicInteger openClients = new AtomicInteger();
/**
* Creates a new instance with the default transport {@link AmqpTransportType
* non-shared connection means that a dedicated AMQP connection is created for every Event Hub consumer or producer
* created using the builder.
*/
public EventHubClientBuilder() {
transport = AmqpTransportType.AMQP;
}
/**
* Sets the credential information given a connection string to the Event Hub instance.
*
* <p>
* If the connection string is copied from the Event Hubs namespace, it will likely not contain the name to the
* desired Event Hub, which is needed. In this case, the name can be added manually by adding {@literal
* "EntityPath=EVENT_HUB_NAME"} to the end of the connection string. For example, "EntityPath=telemetry-hub".
* </p>
*
* <p>
* If you have defined a shared access policy directly on the Event Hub itself, then copying the connection string
* from that Event Hub will result in a connection string that contains the name.
* </p>
*
* @param connectionString The connection string to use for connecting to the Event Hub instance. It is expected
* that the Event Hub name and the shared access key properties are contained in this connection string.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code connectionString} is null or empty. Or, the {@code
* connectionString} does not contain the "EntityPath" key, which is the name of the Event Hub instance.
* @throws AzureException If the shared access signature token credential could not be created using the
* connection string.
*/
@Override
public EventHubClientBuilder connectionString(String connectionString) {
ConnectionStringProperties properties = new ConnectionStringProperties(connectionString);
TokenCredential tokenCredential = getTokenCredential(properties);
return credential(properties.getEndpoint().getHost(), properties.getEntityPath(), tokenCredential);
}
private TokenCredential getTokenCredential(ConnectionStringProperties properties) {
TokenCredential tokenCredential;
if (properties.getSharedAccessSignature() == null) {
tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessKeyName(),
properties.getSharedAccessKey(), ClientConstants.TOKEN_VALIDITY);
} else {
tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessSignature());
}
return tokenCredential;
}
/**
* Sets the client options.
*
* @param clientOptions The client options.
* @return The updated {@link EventHubClientBuilder} object.
*/
@Override
public EventHubClientBuilder clientOptions(ClientOptions clientOptions) {
this.clientOptions = clientOptions;
return this;
}
/**
* Sets the credential information given a connection string to the Event Hubs namespace and name to a specific
* Event Hub instance.
*
* @param connectionString The connection string to use for connecting to the Event Hubs namespace; it is
* expected that the shared access key properties are contained in this connection string, but not the Event Hub
* name.
* @param eventHubName The name of the Event Hub to connect the client to.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws NullPointerException if {@code connectionString} or {@code eventHubName} is null.
* @throws IllegalArgumentException if {@code connectionString} or {@code eventHubName} is an empty string. Or,
* if the {@code connectionString} contains the Event Hub name.
* @throws AzureException If the shared access signature token credential could not be created using the
* connection string.
*/
public EventHubClientBuilder connectionString(String connectionString, String eventHubName) {
Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
if (connectionString.isEmpty()) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(
"'connectionString' cannot be an empty string."));
} else if (eventHubName.isEmpty()) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
final ConnectionStringProperties properties = new ConnectionStringProperties(connectionString);
TokenCredential tokenCredential = getTokenCredential(properties);
if (!CoreUtils.isNullOrEmpty(properties.getEntityPath())
&& !eventHubName.equals(properties.getEntityPath())) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US,
"'connectionString' contains an Event Hub name [%s] and it does not match the given "
+ "'eventHubName' parameter [%s]. Please use the credentials(String connectionString) overload. "
+ "Or supply a 'connectionString' without 'EntityPath' in it.",
properties.getEntityPath(), eventHubName)));
}
return credential(properties.getEndpoint().getHost(), eventHubName, tokenCredential);
}
/**
* Sets the configuration store that is used during construction of the service client.
*
* If not specified, the default configuration store is used to configure the {@link EventHubAsyncClient}. Use
* {@link Configuration
*
* @param configuration The configuration store used to configure the {@link EventHubAsyncClient}.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
@Override
public EventHubClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Sets a custom endpoint address when connecting to the Event Hubs service. This can be useful when your network
* does not allow connecting to the standard Azure Event Hubs endpoint address, but does allow connecting through
* an intermediary. For example: {@literal https:
* <p>
* If no port is specified, the default port for the {@link
* used.
*
* @param customEndpointAddress The custom endpoint address.
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code customEndpointAddress} cannot be parsed into a valid {@link URL}.
*/
public EventHubClientBuilder customEndpointAddress(String customEndpointAddress) {
if (customEndpointAddress == null) {
this.customEndpointAddress = null;
return this;
}
try {
this.customEndpointAddress = new URL(customEndpointAddress);
} catch (MalformedURLException e) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(customEndpointAddress + " : is not a valid URL.", e));
}
return this;
}
/**
* Sets the fully qualified name for the Event Hubs namespace.
*
* @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be
* similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code fullyQualifiedNamespace} is an empty string.
* @throws NullPointerException if {@code fullyQualifiedNamespace} is null.
*/
public EventHubClientBuilder fullyQualifiedNamespace(String fullyQualifiedNamespace) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("'fullyQualifiedNamespace' cannot be an empty string."));
}
return this;
}
private String getAndValidateFullyQualifiedNamespace() {
if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("'fullyQualifiedNamespace' cannot be an empty string."));
}
return fullyQualifiedNamespace;
}
/**
* Sets the name of the Event Hub to connect the client to.
*
* @param eventHubName The name of the Event Hub to connect the client to.
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code eventHubName} is an empty string.
* @throws NullPointerException if {@code eventHubName} is null.
*/
public EventHubClientBuilder eventHubName(String eventHubName) {
this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
if (CoreUtils.isNullOrEmpty(eventHubName)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
return this;
}
private String getEventHubName() {
if (CoreUtils.isNullOrEmpty(eventHubName)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
return eventHubName;
}
/**
* Toggles the builder to use the same connection for producers or consumers that are built from this instance. By
* default, a new connection is constructed and used for each Event Hub consumer or producer created.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder shareConnection() {
this.isSharedConnection.set(true);
return this;
}
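// Editor's note — illustrative sketch only, not part of the original source. It shows the intent of
// shareConnection() above: one builder, one shared AMQP connection, several clients built from it.
// The namespace and Event Hub names are placeholders; any TokenCredential implementation would do.
private static void shareConnectionUsageSketch(TokenCredential credential) {
EventHubClientBuilder builder = new EventHubClientBuilder()
.credential("<your-namespace>.servicebus.windows.net", "<your-event-hub>", credential)
.consumerGroup(DEFAULT_CONSUMER_GROUP_NAME)
.shareConnection();
EventHubProducerAsyncClient producer = builder.buildAsyncProducerClient();
EventHubConsumerAsyncClient consumer = builder.buildAsyncConsumerClient();
// Both clients reuse the same underlying connection; it is disposed when the last client built from
// this builder is closed (see onClientClose() below).
producer.close();
consumer.close();
}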
/**
* Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.
*
* @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be
* similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>.
* @param eventHubName The name of the Event Hub to connect the client to.
* @param credential The token credential to use for authorization. Access controls may be specified by the
* Event Hubs namespace or the requested Event Hub, depending on Azure configuration.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty
* string.
* @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is
* null.
*/
public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName,
TokenCredential credential) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.credentials = Objects.requireNonNull(credential, "'credential' cannot be null.");
this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string."));
} else if (CoreUtils.isNullOrEmpty(eventHubName)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
return this;
}
/**
* Sets the {@link TokenCredential} used to authorize requests sent to the service. Refer to the Azure SDK for Java
* <a href="https:
* documentation for more details on proper usage of the {@link TokenCredential} type.
*
* @param credential The token credential to use for authorization. Access controls may be specified by the
* Event Hubs namespace or the requested Event Hub, depending on Azure configuration.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws NullPointerException if {@code credentials} is null.
*/
@Override
public EventHubClientBuilder credential(TokenCredential credential) {
this.credentials = Objects.requireNonNull(credential, "'credential' cannot be null.");
return this;
}
/**
* Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.
*
* @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be
* similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>.
* @param eventHubName The name of the Event Hub to connect the client to.
* @param credential The shared access name and key credential to use for authorization.
* Access controls may be specified by the Event Hubs namespace or the requested Event Hub,
* depending on Azure configuration.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty
* string.
* @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is
* null.
*/
public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName,
AzureNamedKeyCredential credential) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string."));
} else if (CoreUtils.isNullOrEmpty(eventHubName)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
Objects.requireNonNull(credential, "'credential' cannot be null.");
this.credentials = new EventHubSharedKeyCredential(credential.getAzureNamedKey().getName(),
credential.getAzureNamedKey().getKey(), ClientConstants.TOKEN_VALIDITY);
return this;
}
/**
* Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.
*
* @param credential The shared access name and key credential to use for authorization.
* Access controls may be specified by the Event Hubs namespace or the requested Event Hub,
* depending on Azure configuration.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws NullPointerException if {@code credentials} is null.
*/
@Override
public EventHubClientBuilder credential(AzureNamedKeyCredential credential) {
Objects.requireNonNull(credential, "'credential' cannot be null.");
this.credentials = new EventHubSharedKeyCredential(credential.getAzureNamedKey().getName(),
credential.getAzureNamedKey().getKey(), ClientConstants.TOKEN_VALIDITY);
return this;
}
/**
* Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.
*
* @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be
* similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>.
* @param eventHubName The name of the Event Hub to connect the client to.
* @param credential The shared access signature credential to use for authorization.
* Access controls may be specified by the Event Hubs namespace or the requested Event Hub,
* depending on Azure configuration.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty
* string.
* @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is
* null.
*/
public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName,
AzureSasCredential credential) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string."));
} else if (CoreUtils.isNullOrEmpty(eventHubName)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
Objects.requireNonNull(credential, "'credential' cannot be null.");
this.credentials = new EventHubSharedKeyCredential(credential.getSignature());
return this;
}
/**
* Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.
*
* @param credential The shared access signature credential to use for authorization.
* Access controls may be specified by the Event Hubs namespace or the requested Event Hub,
* depending on Azure configuration.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws NullPointerException if {@code credentials} is null.
*/
@Override
public EventHubClientBuilder credential(AzureSasCredential credential) {
Objects.requireNonNull(credential, "'credential' cannot be null.");
this.credentials = new EventHubSharedKeyCredential(credential.getSignature());
return this;
}
/**
* Sets the proxy configuration to use for {@link EventHubAsyncClient}. When a proxy is configured, {@link
* AmqpTransportType
*
* @param proxyOptions The proxy configuration to use.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
@Override
public EventHubClientBuilder proxyOptions(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
* Sets the transport type by which all the communication with Azure Event Hubs occurs. Default value is {@link
* AmqpTransportType
*
* @param transport The transport type to use.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
@Override
public EventHubClientBuilder transportType(AmqpTransportType transport) {
this.transport = transport;
return this;
}
/**
* Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used.
*
* @param retryOptions The retry policy to use.
*
* @return The updated {@link EventHubClientBuilder} object.
* @deprecated Replaced by {@link
*/
@Deprecated
public EventHubClientBuilder retry(AmqpRetryOptions retryOptions) {
this.retryOptions = retryOptions;
return this;
}
/**
* Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used.
*
* @param retryOptions The retry policy to use.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
@Override
public EventHubClientBuilder retryOptions(AmqpRetryOptions retryOptions) {
this.retryOptions = retryOptions;
return this;
}
/**
* Sets the name of the consumer group this consumer is associated with. Events are read in the context of this
* group. The name of the consumer group that is created by default is {@link
* "$Default"}.
*
* @param consumerGroup The name of the consumer group this consumer is associated with. Events are read in the
* context of this group. The name of the consumer group that is created by default is {@link
*
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder consumerGroup(String consumerGroup) {
this.consumerGroup = consumerGroup;
return this;
}
/**
* Sets the count used by the receiver to control the number of events the Event Hub consumer will actively receive
* and queue locally without regard to whether a receive operation is currently active.
*
* @param prefetchCount The amount of events to queue locally.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code prefetchCount} is less than {@link
* greater than {@link
*/
public EventHubClientBuilder prefetchCount(int prefetchCount) {
if (prefetchCount < MINIMUM_PREFETCH_COUNT) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US,
"PrefetchCount, '%s' has to be above %s", prefetchCount, MINIMUM_PREFETCH_COUNT)));
}
if (prefetchCount > MAXIMUM_PREFETCH_COUNT) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US,
"PrefetchCount, '%s', has to be below %s", prefetchCount, MAXIMUM_PREFETCH_COUNT)));
}
this.prefetchCount = prefetchCount;
return this;
}
/**
* Package-private method that gets the prefetch count.
*
* @return Gets the prefetch count or {@code null} if it has not been set.
* @see
*/
Integer getPrefetchCount() {
return prefetchCount;
}
/**
* Package-private method that sets the scheduler for the created Event Hub client.
*
* @param scheduler Scheduler to set.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
EventHubClientBuilder scheduler(Scheduler scheduler) {
this.scheduler = scheduler;
return this;
}
/**
* Package-private method that sets the verify mode for this connection.
*
* @param verifyMode The verification mode.
* @return The updated {@link EventHubClientBuilder} object.
*/
EventHubClientBuilder verifyMode(SslDomain.VerifyMode verifyMode) {
this.verifyMode = verifyMode;
return this;
}
/**
* Creates a new {@link EventHubConsumerAsyncClient} based on the options set on this builder. Every time {@code
* buildAsyncConsumer()} is invoked, a new instance of {@link EventHubConsumerAsyncClient} is created.
*
* @return A new {@link EventHubConsumerAsyncClient} with the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* {@link
* {@link AmqpTransportType
*/
public EventHubConsumerAsyncClient buildAsyncConsumerClient() {
if (CoreUtils.isNullOrEmpty(consumerGroup)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'consumerGroup' cannot be null or an empty "
+ "string. using EventHubClientBuilder.consumerGroup(String)"));
}
return buildAsyncClient().createConsumer(consumerGroup, prefetchCount);
}
/**
* Creates a new {@link EventHubConsumerClient} based on the options set on this builder. Every time {@code
* buildConsumer()} is invoked, a new instance of {@link EventHubConsumerClient} is created.
*
* @return A new {@link EventHubConsumerClient} with the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* {@link
* {@link AmqpTransportType
*/
public EventHubConsumerClient buildConsumerClient() {
return buildClient().createConsumer(consumerGroup, prefetchCount);
}
/**
* Creates a new {@link EventHubProducerAsyncClient} based on options set on this builder. Every time {@code
* buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerAsyncClient} is created.
*
* @return A new {@link EventHubProducerAsyncClient} instance with all the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* proxy is specified but the transport type is not {@link AmqpTransportType
*/
public EventHubProducerAsyncClient buildAsyncProducerClient() {
return buildAsyncClient().createProducer();
}
/**
* Creates a new {@link EventHubProducerClient} based on options set on this builder. Every time {@code
* buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerClient} is created.
*
* @return A new {@link EventHubProducerClient} instance with all the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* proxy is specified but the transport type is not {@link AmqpTransportType
*/
public EventHubProducerClient buildProducerClient() {
return buildClient().createProducer();
}
/**
* Creates a new {@link EventHubAsyncClient} based on options set on this builder. Every time {@code
* buildAsyncClient()} is invoked, a new instance of {@link EventHubAsyncClient} is created.
*
* <p>
* The following options are used if ones are not specified in the builder:
*
* <ul>
* <li>If no configuration is specified, the {@link Configuration
* is used to provide any shared configuration values. The configuration values read are the {@link
* Configuration
* ProxyOptions
* <li>If no retry is specified, the default retry options are used.</li>
* <li>If no proxy is specified, the builder checks the {@link Configuration
* configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li>
* <li>If no timeout is specified, a {@link ClientConstants
* </ul>
*
* @return A new {@link EventHubAsyncClient} instance with all the configured options.
* @throws IllegalArgumentException if the credentials have not been set using either {@link
*
* specified but the transport type is not {@link AmqpTransportType
*/
/**
* Creates a new {@link EventHubClient} based on options set on this builder. Every time {@code buildClient()} is
* invoked, a new instance of {@link EventHubClient} is created.
*
* <p>
* The following options are used if ones are not specified in the builder:
*
* <ul>
* <li>If no configuration is specified, the {@link Configuration
* is used to provide any shared configuration values. The configuration values read are the {@link
* Configuration
* ProxyOptions
* <li>If no retry is specified, the default retry options are used.</li>
* <li>If no proxy is specified, the builder checks the {@link Configuration
* configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li>
* <li>If no timeout is specified, a {@link ClientConstants
* <li>If no scheduler is specified, an {@link Schedulers
* </ul>
*
* @return A new {@link EventHubClient} instance with all the configured options.
* @throws IllegalArgumentException if the credentials have not been set using either {@link
*
* specified but the transport type is not {@link AmqpTransportType
*/
EventHubClient buildClient() {
if (prefetchCount == null) {
prefetchCount = DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT;
}
final EventHubAsyncClient client = buildAsyncClient();
return new EventHubClient(client, retryOptions);
}
void onClientClose() {
synchronized (connectionLock) {
final int numberOfOpenClients = openClients.decrementAndGet();
LOGGER.info("Closing a dependent client.
if (numberOfOpenClients > 0) {
return;
}
if (numberOfOpenClients < 0) {
LOGGER.warning("There should not be less than 0 clients. actual: {}", numberOfOpenClients);
}
LOGGER.info("No more open clients, closing shared connection.");
if (eventHubConnectionProcessor != null) {
eventHubConnectionProcessor.dispose();
eventHubConnectionProcessor = null;
} else {
LOGGER.warning("Shared EventHubConnectionProcessor was already disposed.");
}
}
}
private EventHubConnectionProcessor buildConnectionProcessor(MessageSerializer messageSerializer) {
final ConnectionOptions connectionOptions = getConnectionOptions();
final Flux<EventHubAmqpConnection> connectionFlux = Flux.create(sink -> {
sink.onRequest(request -> {
if (request == 0) {
return;
} else if (request > 1) {
sink.error(LOGGER.logExceptionAsWarning(new IllegalArgumentException(
"Requested more than one connection. Only emitting one. Request: " + request)));
return;
}
final String connectionId = StringUtil.getRandomString("MF");
LOGGER.atInfo()
.addKeyValue(CONNECTION_ID_KEY, connectionId)
.log("Emitting a single connection.");
final TokenManagerProvider tokenManagerProvider = new AzureTokenManagerProvider(
connectionOptions.getAuthorizationType(), connectionOptions.getFullyQualifiedNamespace(),
connectionOptions.getAuthorizationScope());
final ReactorProvider provider = new ReactorProvider();
final ReactorHandlerProvider handlerProvider = new ReactorHandlerProvider(provider);
final EventHubAmqpConnection connection = new EventHubReactorAmqpConnection(connectionId,
connectionOptions, getEventHubName(), provider, handlerProvider, tokenManagerProvider,
messageSerializer);
sink.next(connection);
});
});
return connectionFlux.subscribeWith(new EventHubConnectionProcessor(
connectionOptions.getFullyQualifiedNamespace(), getEventHubName(), connectionOptions.getRetry()));
}
private ConnectionOptions getConnectionOptions() {
Configuration buildConfiguration = configuration == null
? Configuration.getGlobalConfiguration().clone()
: configuration;
if (credentials == null) {
final String connectionString = buildConfiguration.get(AZURE_EVENT_HUBS_CONNECTION_STRING);
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("Credentials have not been set. "
+ "They can be set using: connectionString(String), connectionString(String, String), "
+ "credentials(String, String, TokenCredential), or setting the environment variable '"
+ AZURE_EVENT_HUBS_CONNECTION_STRING + "' with a connection string"));
}
connectionString(connectionString);
}
if (proxyOptions == null) {
proxyOptions = getDefaultProxyConfiguration(buildConfiguration);
}
if (proxyOptions != null && proxyOptions.isProxyAddressConfigured()
&& transport != AmqpTransportType.AMQP_WEB_SOCKETS) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(
"Cannot use a proxy when TransportType is not AMQP Web Sockets."));
}
final CbsAuthorizationType authorizationType = credentials instanceof EventHubSharedKeyCredential
? CbsAuthorizationType.SHARED_ACCESS_SIGNATURE
: CbsAuthorizationType.JSON_WEB_TOKEN;
final SslDomain.VerifyMode verificationMode = verifyMode != null
? verifyMode
: SslDomain.VerifyMode.VERIFY_PEER_NAME;
final ClientOptions options = clientOptions != null ? clientOptions : new ClientOptions();
final Map<String, String> properties = CoreUtils.getProperties(EVENTHUBS_PROPERTIES_FILE);
final String product = properties.getOrDefault(NAME_KEY, UNKNOWN);
final String clientVersion = properties.getOrDefault(VERSION_KEY, UNKNOWN);
if (customEndpointAddress == null) {
return new ConnectionOptions(getAndValidateFullyQualifiedNamespace(), credentials, authorizationType,
ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE, transport, retryOptions, proxyOptions, scheduler,
options, verificationMode, product, clientVersion);
} else {
return new ConnectionOptions(getAndValidateFullyQualifiedNamespace(), credentials, authorizationType,
ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE, transport, retryOptions, proxyOptions, scheduler,
options, verificationMode, product, clientVersion, customEndpointAddress.getHost(),
customEndpointAddress.getPort());
}
}
private ProxyOptions getDefaultProxyConfiguration(Configuration configuration) {
ProxyAuthenticationType authentication = ProxyAuthenticationType.NONE;
if (proxyOptions != null) {
authentication = proxyOptions.getAuthentication();
}
String proxyAddress = configuration.get(Configuration.PROPERTY_HTTP_PROXY);
if (CoreUtils.isNullOrEmpty(proxyAddress)) {
return ProxyOptions.SYSTEM_DEFAULTS;
}
return getProxyOptions(authentication, proxyAddress, configuration,
Boolean.parseBoolean(configuration.get("java.net.useSystemProxies")));
}
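// Editor's note — illustrative sketch only, not part of the original source. It lists the configuration
// keys that getDefaultProxyConfiguration(...) above and getProxyOptions(...) below read. The proxy
// address must be in "host:port" form to match HOST_PORT_PATTERN; all values here are placeholders.
private static Configuration proxyConfigurationSketch() {
return Configuration.getGlobalConfiguration().clone()
.put(Configuration.PROPERTY_HTTP_PROXY, "proxy.example.com:8080")
.put(ProxyOptions.PROXY_USERNAME, "proxy-user")
.put(ProxyOptions.PROXY_PASSWORD, "proxy-password")
.put("java.net.useSystemProxies", "true");
}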
private ProxyOptions getProxyOptions(ProxyAuthenticationType authentication, String proxyAddress,
Configuration configuration, boolean useSystemProxies) {
String host;
int port;
if (HOST_PORT_PATTERN.matcher(proxyAddress.trim()).find()) {
final String[] hostPort = proxyAddress.split(":");
host = hostPort[0];
port = Integer.parseInt(hostPort[1]);
final Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port));
final String username = configuration.get(ProxyOptions.PROXY_USERNAME);
final String password = configuration.get(ProxyOptions.PROXY_PASSWORD);
return new ProxyOptions(authentication, proxy, username, password);
} else if (useSystemProxies) {
com.azure.core.http.ProxyOptions coreProxyOptions = com.azure.core.http.ProxyOptions
.fromConfiguration(configuration);
Proxy.Type proxyType = coreProxyOptions.getType().toProxyType();
InetSocketAddress coreProxyAddress = coreProxyOptions.getAddress();
String username = coreProxyOptions.getUsername();
String password = coreProxyOptions.getPassword();
return new ProxyOptions(authentication, new Proxy(proxyType, coreProxyAddress), username, password);
} else {
LOGGER.verbose("'HTTP_PROXY' was configured but ignored as 'java.net.useSystemProxies' wasn't "
+ "set or was false.");
return ProxyOptions.SYSTEM_DEFAULTS;
}
}
} | class EventHubClientBuilder implements
TokenCredentialTrait<EventHubClientBuilder>,
AzureNamedKeyCredentialTrait<EventHubClientBuilder>,
ConnectionStringTrait<EventHubClientBuilder>,
AzureSasCredentialTrait<EventHubClientBuilder>,
AmqpTrait<EventHubClientBuilder>,
ConfigurationTrait<EventHubClientBuilder> {
static final int DEFAULT_PREFETCH_COUNT = 500;
static final int DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT = 1;
/**
* The name of the default consumer group in the Event Hubs service.
*/
public static final String DEFAULT_CONSUMER_GROUP_NAME = "$Default";
/**
* The minimum value allowed for the prefetch count of the consumer.
*/
private static final int MINIMUM_PREFETCH_COUNT = 1;
/**
* The maximum value allowed for the prefetch count of the consumer.
*/
private static final int MAXIMUM_PREFETCH_COUNT = 8000;
private static final String EVENTHUBS_PROPERTIES_FILE = "azure-messaging-eventhubs.properties";
private static final String NAME_KEY = "name";
private static final String VERSION_KEY = "version";
private static final String UNKNOWN = "UNKNOWN";
private static final String AZURE_EVENT_HUBS_CONNECTION_STRING = "AZURE_EVENT_HUBS_CONNECTION_STRING";
private static final AmqpRetryOptions DEFAULT_RETRY = new AmqpRetryOptions()
.setTryTimeout(ClientConstants.OPERATION_TIMEOUT);
private static final Pattern HOST_PORT_PATTERN = Pattern.compile("^[^:]+:\\d+");
private static final ClientLogger LOGGER = new ClientLogger(EventHubClientBuilder.class);
private final Object connectionLock = new Object();
private final AtomicBoolean isSharedConnection = new AtomicBoolean();
private TokenCredential credentials;
private Configuration configuration;
private ProxyOptions proxyOptions;
private AmqpRetryOptions retryOptions;
private Scheduler scheduler;
private AmqpTransportType transport;
private String fullyQualifiedNamespace;
private String eventHubName;
private String consumerGroup;
private EventHubConnectionProcessor eventHubConnectionProcessor;
private Integer prefetchCount;
private ClientOptions clientOptions;
private SslDomain.VerifyMode verifyMode;
private URL customEndpointAddress;
/**
* Keeps track of the open clients that were created from this builder when there is a shared connection.
*/
private final AtomicInteger openClients = new AtomicInteger();
/**
* Creates a new instance with the default transport {@link AmqpTransportType
* non-shared connection means that a dedicated AMQP connection is created for every Event Hub consumer or producer
* created using the builder.
*/
public EventHubClientBuilder() {
transport = AmqpTransportType.AMQP;
}
/**
* Sets the credential information given a connection string to the Event Hub instance.
*
* <p>
* If the connection string is copied from the Event Hubs namespace, it will likely not contain the name to the
* desired Event Hub, which is needed. In this case, the name can be added manually by adding {@literal
* "EntityPath=EVENT_HUB_NAME"} to the end of the connection string. For example, "EntityPath=telemetry-hub".
* </p>
*
* <p>
* If you have defined a shared access policy directly on the Event Hub itself, then copying the connection string
* from that Event Hub will result in a connection string that contains the name.
* </p>
*
* @param connectionString The connection string to use for connecting to the Event Hub instance. It is expected
* that the Event Hub name and the shared access key properties are contained in this connection string.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code connectionString} is null or empty. Or, the {@code
* connectionString} does not contain the "EntityPath" key, which is the name of the Event Hub instance.
* @throws AzureException If the shared access signature token credential could not be created using the
* connection string.
*/
@Override
public EventHubClientBuilder connectionString(String connectionString) {
ConnectionStringProperties properties = new ConnectionStringProperties(connectionString);
TokenCredential tokenCredential = getTokenCredential(properties);
return credential(properties.getEndpoint().getHost(), properties.getEntityPath(), tokenCredential);
}
private TokenCredential getTokenCredential(ConnectionStringProperties properties) {
TokenCredential tokenCredential;
if (properties.getSharedAccessSignature() == null) {
tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessKeyName(),
properties.getSharedAccessKey(), ClientConstants.TOKEN_VALIDITY);
} else {
tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessSignature());
}
return tokenCredential;
}
/**
* Sets the client options.
*
* @param clientOptions The client options.
* @return The updated {@link EventHubClientBuilder} object.
*/
@Override
public EventHubClientBuilder clientOptions(ClientOptions clientOptions) {
this.clientOptions = clientOptions;
return this;
}
/**
* Sets the credential information given a connection string to the Event Hubs namespace and name to a specific
* Event Hub instance.
*
* @param connectionString The connection string to use for connecting to the Event Hubs namespace; it is
* expected that the shared access key properties are contained in this connection string, but not the Event Hub
* name.
* @param eventHubName The name of the Event Hub to connect the client to.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws NullPointerException if {@code connectionString} or {@code eventHubName} is null.
* @throws IllegalArgumentException if {@code connectionString} or {@code eventHubName} is an empty string. Or,
* if the {@code connectionString} contains the Event Hub name.
* @throws AzureException If the shared access signature token credential could not be created using the
* connection string.
*/
public EventHubClientBuilder connectionString(String connectionString, String eventHubName) {
Objects.requireNonNull(connectionString, "'connectionString' cannot be null.");
Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
if (connectionString.isEmpty()) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(
"'connectionString' cannot be an empty string."));
} else if (eventHubName.isEmpty()) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
final ConnectionStringProperties properties = new ConnectionStringProperties(connectionString);
TokenCredential tokenCredential = getTokenCredential(properties);
if (!CoreUtils.isNullOrEmpty(properties.getEntityPath())
&& !eventHubName.equals(properties.getEntityPath())) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US,
"'connectionString' contains an Event Hub name [%s] and it does not match the given "
+ "'eventHubName' parameter [%s]. Please use the credentials(String connectionString) overload. "
+ "Or supply a 'connectionString' without 'EntityPath' in it.",
properties.getEntityPath(), eventHubName)));
}
return credential(properties.getEndpoint().getHost(), eventHubName, tokenCredential);
}
/**
* Sets the configuration store that is used during construction of the service client.
*
* If not specified, the default configuration store is used to configure the {@link EventHubAsyncClient}. Use
* {@link Configuration
*
* @param configuration The configuration store used to configure the {@link EventHubAsyncClient}.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
@Override
public EventHubClientBuilder configuration(Configuration configuration) {
this.configuration = configuration;
return this;
}
/**
* Sets a custom endpoint address when connecting to the Event Hubs service. This can be useful when your network
* does not allow connecting to the standard Azure Event Hubs endpoint address, but does allow connecting through
* an intermediary. For example: {@literal https:
* <p>
* If no port is specified, the default port for the {@link
* used.
*
* @param customEndpointAddress The custom endpoint address.
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code customEndpointAddress} cannot be parsed into a valid {@link URL}.
*/
public EventHubClientBuilder customEndpointAddress(String customEndpointAddress) {
if (customEndpointAddress == null) {
this.customEndpointAddress = null;
return this;
}
try {
this.customEndpointAddress = new URL(customEndpointAddress);
} catch (MalformedURLException e) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException(customEndpointAddress + " : is not a valid URL.", e));
}
return this;
}
/**
* Sets the fully qualified name for the Event Hubs namespace.
*
* @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be
* similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code fullyQualifiedNamespace} is an empty string.
* @throws NullPointerException if {@code fullyQualifiedNamespace} is null.
*/
public EventHubClientBuilder fullyQualifiedNamespace(String fullyQualifiedNamespace) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("'fullyQualifiedNamespace' cannot be an empty string."));
}
return this;
}
private String getAndValidateFullyQualifiedNamespace() {
if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) {
throw LOGGER.logExceptionAsError(
new IllegalArgumentException("'fullyQualifiedNamespace' cannot be an empty string."));
}
return fullyQualifiedNamespace;
}
/**
* Sets the name of the Event Hub to connect the client to.
*
* @param eventHubName The name of the Event Hub to connect the client to.
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code eventHubName} is an empty string.
* @throws NullPointerException if {@code eventHubName} is null.
*/
public EventHubClientBuilder eventHubName(String eventHubName) {
this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
if (CoreUtils.isNullOrEmpty(eventHubName)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
return this;
}
private String getEventHubName() {
if (CoreUtils.isNullOrEmpty(eventHubName)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
return eventHubName;
}
/**
* Toggles the builder to use the same connection for producers or consumers that are built from this instance. By
* default, a new connection is constructed and used for each Event Hub consumer or producer created.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder shareConnection() {
this.isSharedConnection.set(true);
return this;
}
/**
* Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.
*
* @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be
* similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>.
* @param eventHubName The name of the Event Hub to connect the client to.
* @param credential The token credential to use for authorization. Access controls may be specified by the
* Event Hubs namespace or the requested Event Hub, depending on Azure configuration.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty
* string.
* @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is
* null.
*/
public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName,
TokenCredential credential) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.credentials = Objects.requireNonNull(credential, "'credential' cannot be null.");
this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string."));
} else if (CoreUtils.isNullOrEmpty(eventHubName)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
return this;
}
/**
* Sets the {@link TokenCredential} used to authorize requests sent to the service. Refer to the Azure SDK for Java
* <a href="https:
* documentation for more details on proper usage of the {@link TokenCredential} type.
*
* @param credential The token credential to use for authorization. Access controls may be specified by the
* Event Hubs namespace or the requested Event Hub, depending on Azure configuration.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws NullPointerException if {@code credentials} is null.
*/
@Override
public EventHubClientBuilder credential(TokenCredential credential) {
this.credentials = Objects.requireNonNull(credential, "'credential' cannot be null.");
return this;
}
/**
* Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.
*
* @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be
* similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>.
* @param eventHubName The name of the Event Hub to connect the client to.
* @param credential The shared access name and key credential to use for authorization.
* Access controls may be specified by the Event Hubs namespace or the requested Event Hub,
* depending on Azure configuration.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty
* string.
* @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is
* null.
*/
public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName,
AzureNamedKeyCredential credential) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string."));
} else if (CoreUtils.isNullOrEmpty(eventHubName)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
Objects.requireNonNull(credential, "'credential' cannot be null.");
this.credentials = new EventHubSharedKeyCredential(credential.getAzureNamedKey().getName(),
credential.getAzureNamedKey().getKey(), ClientConstants.TOKEN_VALIDITY);
return this;
}
/**
* Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.
*
* @param credential The shared access name and key credential to use for authorization.
* Access controls may be specified by the Event Hubs namespace or the requested Event Hub,
* depending on Azure configuration.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws NullPointerException if {@code credentials} is null.
*/
@Override
public EventHubClientBuilder credential(AzureNamedKeyCredential credential) {
Objects.requireNonNull(credential, "'credential' cannot be null.");
this.credentials = new EventHubSharedKeyCredential(credential.getAzureNamedKey().getName(),
credential.getAzureNamedKey().getKey(), ClientConstants.TOKEN_VALIDITY);
return this;
}
/**
* Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.
*
* @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be
* similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>.
* @param eventHubName The name of the Event Hub to connect the client to.
* @param credential The shared access signature credential to use for authorization.
* Access controls may be specified by the Event Hubs namespace or the requested Event Hub,
* depending on Azure configuration.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty
* string.
* @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is
* null.
*/
public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName,
AzureSasCredential credential) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string."));
} else if (CoreUtils.isNullOrEmpty(eventHubName)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string."));
}
Objects.requireNonNull(credential, "'credential' cannot be null.");
this.credentials = new EventHubSharedKeyCredential(credential.getSignature());
return this;
}
/**
* Sets the credential information for which Event Hub instance to connect to, and how to authorize against it.
*
* @param credential The shared access signature credential to use for authorization.
* Access controls may be specified by the Event Hubs namespace or the requested Event Hub,
* depending on Azure configuration.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws NullPointerException if {@code credentials} is null.
*/
@Override
public EventHubClientBuilder credential(AzureSasCredential credential) {
Objects.requireNonNull(credential, "'credential' cannot be null.");
this.credentials = new EventHubSharedKeyCredential(credential.getSignature());
return this;
}
/**
* Sets the proxy configuration to use for {@link EventHubAsyncClient}. When a proxy is configured, {@link
* AmqpTransportType
*
* @param proxyOptions The proxy configuration to use.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
@Override
public EventHubClientBuilder proxyOptions(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
* Sets the transport type by which all the communication with Azure Event Hubs occurs. Default value is {@link
* AmqpTransportType
*
* @param transport The transport type to use.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
@Override
public EventHubClientBuilder transportType(AmqpTransportType transport) {
this.transport = transport;
return this;
}
/**
* Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used.
*
* @param retryOptions The retry policy to use.
*
* @return The updated {@link EventHubClientBuilder} object.
* @deprecated Replaced by {@link
*/
@Deprecated
public EventHubClientBuilder retry(AmqpRetryOptions retryOptions) {
this.retryOptions = retryOptions;
return this;
}
/**
* Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used.
*
* @param retryOptions The retry policy to use.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
@Override
public EventHubClientBuilder retryOptions(AmqpRetryOptions retryOptions) {
this.retryOptions = retryOptions;
return this;
}
/**
* Sets the name of the consumer group this consumer is associated with. Events are read in the context of this
* group. The name of the consumer group that is created by default is {@link
* "$Default"}.
*
* @param consumerGroup The name of the consumer group this consumer is associated with. Events are read in the
* context of this group. The name of the consumer group that is created by default is {@link
*
*
* @return The updated {@link EventHubClientBuilder} object.
*/
public EventHubClientBuilder consumerGroup(String consumerGroup) {
this.consumerGroup = consumerGroup;
return this;
}
/**
* Sets the count used by the receiver to control the number of events the Event Hub consumer will actively receive
* and queue locally without regard to whether a receive operation is currently active.
*
* @param prefetchCount The amount of events to queue locally.
*
* @return The updated {@link EventHubClientBuilder} object.
* @throws IllegalArgumentException if {@code prefetchCount} is less than {@link
* greater than {@link
*/
public EventHubClientBuilder prefetchCount(int prefetchCount) {
if (prefetchCount < MINIMUM_PREFETCH_COUNT) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US,
"PrefetchCount, '%s' has to be above %s", prefetchCount, MINIMUM_PREFETCH_COUNT)));
}
if (prefetchCount > MAXIMUM_PREFETCH_COUNT) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US,
"PrefetchCount, '%s', has to be below %s", prefetchCount, MAXIMUM_PREFETCH_COUNT)));
}
this.prefetchCount = prefetchCount;
return this;
}
/**
* Package-private method that gets the prefetch count.
*
* @return Gets the prefetch count or {@code null} if it has not been set.
* @see
*/
Integer getPrefetchCount() {
return prefetchCount;
}
/**
* Package-private method that sets the scheduler for the created Event Hub client.
*
* @param scheduler Scheduler to set.
*
* @return The updated {@link EventHubClientBuilder} object.
*/
EventHubClientBuilder scheduler(Scheduler scheduler) {
this.scheduler = scheduler;
return this;
}
/**
* Package-private method that sets the verify mode for this connection.
*
* @param verifyMode The verification mode.
* @return The updated {@link EventHubClientBuilder} object.
*/
EventHubClientBuilder verifyMode(SslDomain.VerifyMode verifyMode) {
this.verifyMode = verifyMode;
return this;
}
/**
* Creates a new {@link EventHubConsumerAsyncClient} based on the options set on this builder. Every time {@code
* buildAsyncConsumer()} is invoked, a new instance of {@link EventHubConsumerAsyncClient} is created.
*
* @return A new {@link EventHubConsumerAsyncClient} with the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* {@link
* {@link AmqpTransportType
*/
public EventHubConsumerAsyncClient buildAsyncConsumerClient() {
if (CoreUtils.isNullOrEmpty(consumerGroup)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("'consumerGroup' cannot be null or an empty "
+ "string. using EventHubClientBuilder.consumerGroup(String)"));
}
return buildAsyncClient().createConsumer(consumerGroup, prefetchCount);
}
/**
* Creates a new {@link EventHubConsumerClient} based on the options set on this builder. Every time {@code
* buildConsumer()} is invoked, a new instance of {@link EventHubConsumerClient} is created.
*
* @return A new {@link EventHubConsumerClient} with the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* {@link
* {@link AmqpTransportType
*/
public EventHubConsumerClient buildConsumerClient() {
return buildClient().createConsumer(consumerGroup, prefetchCount);
}
/**
* Creates a new {@link EventHubProducerAsyncClient} based on options set on this builder. Every time {@code
* buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerAsyncClient} is created.
*
* @return A new {@link EventHubProducerAsyncClient} instance with all the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* proxy is specified but the transport type is not {@link AmqpTransportType
*/
public EventHubProducerAsyncClient buildAsyncProducerClient() {
return buildAsyncClient().createProducer();
}
/**
* Creates a new {@link EventHubProducerClient} based on options set on this builder. Every time {@code
* buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerClient} is created.
*
* @return A new {@link EventHubProducerClient} instance with all the configured options.
* @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using
* either {@link
* proxy is specified but the transport type is not {@link AmqpTransportType
*/
public EventHubProducerClient buildProducerClient() {
return buildClient().createProducer();
}
/**
* Creates a new {@link EventHubAsyncClient} based on options set on this builder. Every time {@code
* buildAsyncClient()} is invoked, a new instance of {@link EventHubAsyncClient} is created.
*
* <p>
* The following options are used if ones are not specified in the builder:
*
* <ul>
* <li>If no configuration is specified, the {@link Configuration
* is used to provide any shared configuration values. The configuration values read are the {@link
* Configuration
* ProxyOptions
* <li>If no retry is specified, the default retry options are used.</li>
* <li>If no proxy is specified, the builder checks the {@link Configuration
* configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li>
* <li>If no timeout is specified, a {@link ClientConstants
* </ul>
*
* @return A new {@link EventHubAsyncClient} instance with all the configured options.
* @throws IllegalArgumentException if the credentials have not been set using either {@link
*
* specified but the transport type is not {@link AmqpTransportType
*/
/**
* Creates a new {@link EventHubClient} based on options set on this builder. Every time {@code buildClient()} is
* invoked, a new instance of {@link EventHubClient} is created.
*
* <p>
* The following options are used if ones are not specified in the builder:
*
* <ul>
* <li>If no configuration is specified, the {@link Configuration
* is used to provide any shared configuration values. The configuration values read are the {@link
* Configuration
* ProxyOptions
* <li>If no retry is specified, the default retry options are used.</li>
* <li>If no proxy is specified, the builder checks the {@link Configuration
* configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li>
* <li>If no timeout is specified, a {@link ClientConstants
* <li>If no scheduler is specified, an {@link Schedulers
* </ul>
*
* @return A new {@link EventHubClient} instance with all the configured options.
* @throws IllegalArgumentException if the credentials have not been set using either {@link
*
* specified but the transport type is not {@link AmqpTransportType
*/
EventHubClient buildClient() {
if (prefetchCount == null) {
prefetchCount = DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT;
}
final EventHubAsyncClient client = buildAsyncClient();
return new EventHubClient(client, retryOptions);
}
void onClientClose() {
synchronized (connectionLock) {
final int numberOfOpenClients = openClients.decrementAndGet();
LOGGER.info("Closing a dependent client.
if (numberOfOpenClients > 0) {
return;
}
if (numberOfOpenClients < 0) {
LOGGER.warning("There should not be less than 0 clients. actual: {}", numberOfOpenClients);
}
LOGGER.info("No more open clients, closing shared connection.");
if (eventHubConnectionProcessor != null) {
eventHubConnectionProcessor.dispose();
eventHubConnectionProcessor = null;
} else {
LOGGER.warning("Shared EventHubConnectionProcessor was already disposed.");
}
}
}
private EventHubConnectionProcessor buildConnectionProcessor(MessageSerializer messageSerializer) {
final ConnectionOptions connectionOptions = getConnectionOptions();
final Flux<EventHubAmqpConnection> connectionFlux = Flux.create(sink -> {
sink.onRequest(request -> {
if (request == 0) {
return;
} else if (request > 1) {
sink.error(LOGGER.logExceptionAsWarning(new IllegalArgumentException(
"Requested more than one connection. Only emitting one. Request: " + request)));
return;
}
final String connectionId = StringUtil.getRandomString("MF");
LOGGER.atInfo()
.addKeyValue(CONNECTION_ID_KEY, connectionId)
.log("Emitting a single connection.");
final TokenManagerProvider tokenManagerProvider = new AzureTokenManagerProvider(
connectionOptions.getAuthorizationType(), connectionOptions.getFullyQualifiedNamespace(),
connectionOptions.getAuthorizationScope());
final ReactorProvider provider = new ReactorProvider();
final ReactorHandlerProvider handlerProvider = new ReactorHandlerProvider(provider);
final EventHubAmqpConnection connection = new EventHubReactorAmqpConnection(connectionId,
connectionOptions, getEventHubName(), provider, handlerProvider, tokenManagerProvider,
messageSerializer);
sink.next(connection);
});
});
return connectionFlux.subscribeWith(new EventHubConnectionProcessor(
connectionOptions.getFullyQualifiedNamespace(), getEventHubName(), connectionOptions.getRetry()));
}
private ConnectionOptions getConnectionOptions() {
Configuration buildConfiguration = configuration == null
? Configuration.getGlobalConfiguration().clone()
: configuration;
if (credentials == null) {
final String connectionString = buildConfiguration.get(AZURE_EVENT_HUBS_CONNECTION_STRING);
if (CoreUtils.isNullOrEmpty(connectionString)) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException("Credentials have not been set. "
+ "They can be set using: connectionString(String), connectionString(String, String), "
+ "credentials(String, String, TokenCredential), or setting the environment variable '"
+ AZURE_EVENT_HUBS_CONNECTION_STRING + "' with a connection string"));
}
connectionString(connectionString);
}
if (proxyOptions == null) {
proxyOptions = getDefaultProxyConfiguration(buildConfiguration);
}
if (proxyOptions != null && proxyOptions.isProxyAddressConfigured()
&& transport != AmqpTransportType.AMQP_WEB_SOCKETS) {
throw LOGGER.logExceptionAsError(new IllegalArgumentException(
"Cannot use a proxy when TransportType is not AMQP Web Sockets."));
}
final CbsAuthorizationType authorizationType = credentials instanceof EventHubSharedKeyCredential
? CbsAuthorizationType.SHARED_ACCESS_SIGNATURE
: CbsAuthorizationType.JSON_WEB_TOKEN;
final SslDomain.VerifyMode verificationMode = verifyMode != null
? verifyMode
: SslDomain.VerifyMode.VERIFY_PEER_NAME;
final ClientOptions options = clientOptions != null ? clientOptions : new ClientOptions();
final Map<String, String> properties = CoreUtils.getProperties(EVENTHUBS_PROPERTIES_FILE);
final String product = properties.getOrDefault(NAME_KEY, UNKNOWN);
final String clientVersion = properties.getOrDefault(VERSION_KEY, UNKNOWN);
if (customEndpointAddress == null) {
return new ConnectionOptions(getAndValidateFullyQualifiedNamespace(), credentials, authorizationType,
ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE, transport, retryOptions, proxyOptions, scheduler,
options, verificationMode, product, clientVersion);
} else {
return new ConnectionOptions(getAndValidateFullyQualifiedNamespace(), credentials, authorizationType,
ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE, transport, retryOptions, proxyOptions, scheduler,
options, verificationMode, product, clientVersion, customEndpointAddress.getHost(),
customEndpointAddress.getPort());
}
}
private ProxyOptions getDefaultProxyConfiguration(Configuration configuration) {
ProxyAuthenticationType authentication = ProxyAuthenticationType.NONE;
if (proxyOptions != null) {
authentication = proxyOptions.getAuthentication();
}
String proxyAddress = configuration.get(Configuration.PROPERTY_HTTP_PROXY);
if (CoreUtils.isNullOrEmpty(proxyAddress)) {
return ProxyOptions.SYSTEM_DEFAULTS;
}
return getProxyOptions(authentication, proxyAddress, configuration,
Boolean.parseBoolean(configuration.get("java.net.useSystemProxies")));
}
private ProxyOptions getProxyOptions(ProxyAuthenticationType authentication, String proxyAddress,
Configuration configuration, boolean useSystemProxies) {
String host;
int port;
if (HOST_PORT_PATTERN.matcher(proxyAddress.trim()).find()) {
final String[] hostPort = proxyAddress.split(":");
host = hostPort[0];
port = Integer.parseInt(hostPort[1]);
final Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port));
final String username = configuration.get(ProxyOptions.PROXY_USERNAME);
final String password = configuration.get(ProxyOptions.PROXY_PASSWORD);
return new ProxyOptions(authentication, proxy, username, password);
} else if (useSystemProxies) {
com.azure.core.http.ProxyOptions coreProxyOptions = com.azure.core.http.ProxyOptions
.fromConfiguration(configuration);
Proxy.Type proxyType = coreProxyOptions.getType().toProxyType();
InetSocketAddress coreProxyAddress = coreProxyOptions.getAddress();
String username = coreProxyOptions.getUsername();
String password = coreProxyOptions.getPassword();
return new ProxyOptions(authentication, new Proxy(proxyType, coreProxyAddress), username, password);
} else {
LOGGER.verbose("'HTTP_PROXY' was configured but ignored as 'java.net.useSystemProxies' wasn't "
+ "set or was false.");
return ProxyOptions.SYSTEM_DEFAULTS;
}
}
} |
Do we need this previous constructor? Will this end up confusing users/making things error prone? | public BLangDiagnosticLocation(String filePath, int startLine, int endLine, int startColumn, int endColumn) {
this.lineRange = LineRange.from(filePath, LinePosition.from(startLine, startColumn),
LinePosition.from(endLine, endColumn));
this.textRange = TextRange.from(0, 0);
} | } | public BLangDiagnosticLocation(String filePath, int startLine, int endLine, int startColumn, int endColumn) {
this.lineRange = LineRange.from(filePath, LinePosition.from(startLine, startColumn),
LinePosition.from(endLine, endColumn));
this.textRange = TextRange.from(0, 0);
} | class BLangDiagnosticLocation implements Location {
private LineRange lineRange;
private TextRange textRange;
public BLangDiagnosticLocation(String filePath, int startLine, int endLine, int startColumn, int endColumn,
int startOffset, int length) {
this.lineRange = LineRange.from(filePath, LinePosition.from(startLine, startColumn),
LinePosition.from(endLine, endColumn));
this.textRange = TextRange.from(startOffset, length);
}
@Override
public LineRange lineRange() {
return lineRange;
}
@Override
public TextRange textRange() {
return textRange;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof BLangDiagnosticLocation) {
BLangDiagnosticLocation location = (BLangDiagnosticLocation) obj;
return lineRange.equals(location.lineRange) && textRange.equals(location.textRange);
}
return false;
}
@Override
public int hashCode() {
return Objects.hash(lineRange, textRange);
}
@Override
public String toString() {
return lineRange.toString() + textRange.toString();
}
} | class BLangDiagnosticLocation implements Location {
private LineRange lineRange;
private TextRange textRange;
@Deprecated
public BLangDiagnosticLocation(String filePath, int startLine, int endLine, int startColumn, int endColumn,
int startOffset, int length) {
this.lineRange = LineRange.from(filePath, LinePosition.from(startLine, startColumn),
LinePosition.from(endLine, endColumn));
this.textRange = TextRange.from(startOffset, length);
}
@Override
public LineRange lineRange() {
return lineRange;
}
@Override
public TextRange textRange() {
return textRange;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof BLangDiagnosticLocation) {
BLangDiagnosticLocation location = (BLangDiagnosticLocation) obj;
return lineRange.equals(location.lineRange) && textRange.equals(location.textRange);
}
return false;
}
@Override
public int hashCode() {
return Objects.hash(lineRange, textRange);
}
@Override
public String toString() {
return lineRange.toString() + textRange.toString();
}
} |
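A general pattern behind the resolution shown above (keep the older overload, mark it @Deprecated, and let it delegate to the preferred constructor) can be sketched in isolation. This is a minimal illustration with hypothetical names, not code from the Ballerina sources:

```java
// Minimal sketch of the deprecate-and-delegate constructor pattern (hypothetical names).
public final class SpanLocation {

    private final int startOffset;
    private final int length;

    // Preferred constructor: callers state the text range explicitly.
    public SpanLocation(int startOffset, int length) {
        this.startOffset = startOffset;
        this.length = length;
    }

    // Older overload kept for source compatibility; deprecation steers callers
    // to the two-argument constructor without breaking existing code.
    @Deprecated
    public SpanLocation(int startOffset) {
        this(startOffset, 0); // delegate instead of duplicating the assignments
    }

    public int startOffset() {
        return startOffset;
    }

    public int length() {
        return length;
    }

    public static void main(String[] args) {
        SpanLocation preferred = new SpanLocation(10, 5);
        @SuppressWarnings("deprecation")
        SpanLocation legacy = new SpanLocation(10); // still compiles, but is flagged
        System.out.println(preferred.length() + " " + legacy.length()); // 5 0
    }
}
```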
The code for the source metric tests looks very similar; can you try to generify this a bit? | public void testMetrics() throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
int numSplits = 2;
int parallelism = 4;
env.setParallelism(parallelism);
int numRecordsPerSplit = 10;
SharedReference<CyclicBarrier> beforeBarrier =
sharedObjects.add(new CyclicBarrier(numSplits + 1));
SharedReference<CyclicBarrier> afterBarrier =
sharedObjects.add(new CyclicBarrier(numSplits + 1));
int stopAtRecord1 = 4;
int stopAtRecord2 = numRecordsPerSplit - 1;
env.fromSequence(0, numSplits)
.<Long>flatMap(
(split, collector) ->
LongStream.range(0, numRecordsPerSplit).forEach(collector::collect))
.returns(BasicTypeInfo.LONG_TYPE_INFO)
.map(
i -> {
if (i % numRecordsPerSplit == stopAtRecord1
|| i % numRecordsPerSplit == stopAtRecord2) {
beforeBarrier.get().await();
afterBarrier.get().await();
}
return i;
})
.sinkTo(TestSink.newBuilder().setWriter(new MetricWriter()).build())
.name("TestSink");
JobClient jobClient = env.executeAsync();
beforeBarrier.get().await();
assertSinkMetrics(stopAtRecord1, env.getParallelism(), numSplits);
afterBarrier.get().await();
beforeBarrier.get().await();
assertSinkMetrics(stopAtRecord2, env.getParallelism(), numSplits);
afterBarrier.get().await();
jobClient.getJobExecutionResult().get();
} | int parallelism = 4; | public void testMetrics() throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
int numSplits = Math.max(1, env.getParallelism() - 2);
int numRecordsPerSplit = 10;
SharedReference<CyclicBarrier> beforeBarrier =
sharedObjects.add(new CyclicBarrier(numSplits + 1));
SharedReference<CyclicBarrier> afterBarrier =
sharedObjects.add(new CyclicBarrier(numSplits + 1));
int stopAtRecord1 = 4;
int stopAtRecord2 = numRecordsPerSplit - 1;
env.fromSequence(0, numSplits - 1)
.<Long>flatMap(
(split, collector) ->
LongStream.range(0, numRecordsPerSplit).forEach(collector::collect))
.returns(BasicTypeInfo.LONG_TYPE_INFO)
.map(
i -> {
if (i % numRecordsPerSplit == stopAtRecord1
|| i % numRecordsPerSplit == stopAtRecord2) {
beforeBarrier.get().await();
afterBarrier.get().await();
}
return i;
})
.sinkTo(TestSink.newBuilder().setWriter(new MetricWriter()).build())
.name("TestSink");
JobClient jobClient = env.executeAsync();
beforeBarrier.get().await();
assertSinkMetrics(stopAtRecord1, env.getParallelism(), numSplits);
afterBarrier.get().await();
beforeBarrier.get().await();
assertSinkMetrics(stopAtRecord2, env.getParallelism(), numSplits);
afterBarrier.get().await();
jobClient.getJobExecutionResult().get();
} | class SinkITCase extends AbstractTestBase {
@Rule public final SharedObjects sharedObjects = SharedObjects.create();
static final List<Integer> SOURCE_DATA =
Arrays.asList(
895, 127, 148, 161, 148, 662, 822, 491, 275, 122, 850, 630, 682, 765, 434, 970,
714, 795, 288, 422);
static final int STREAMING_SOURCE_SEND_ELEMENTS_NUM = SOURCE_DATA.size() * 2;
static final List<String> EXPECTED_COMMITTED_DATA_IN_STREAMING_MODE =
SOURCE_DATA.stream()
.flatMap(
x ->
Collections.nCopies(
2, Tuple3.of(x, null, Long.MIN_VALUE).toString())
.stream())
.collect(Collectors.toList());
static final List<String> EXPECTED_COMMITTED_DATA_IN_BATCH_MODE =
SOURCE_DATA.stream()
.map(x -> Tuple3.of(x, null, Long.MIN_VALUE).toString())
.collect(Collectors.toList());
static final List<String> EXPECTED_GLOBAL_COMMITTED_DATA_IN_STREAMING_MODE =
SOURCE_DATA.stream()
.flatMap(
x ->
Collections.nCopies(
2, Tuple3.of(x, null, Long.MIN_VALUE).toString())
.stream())
.collect(Collectors.toList());
static final List<String> EXPECTED_GLOBAL_COMMITTED_DATA_IN_BATCH_MODE =
Arrays.asList(
SOURCE_DATA.stream()
.map(x -> Tuple3.of(x, null, Long.MIN_VALUE).toString())
.sorted()
.collect(joining("+")),
END_OF_INPUT_STR);
static final Queue<String> COMMIT_QUEUE = new ConcurrentLinkedQueue<>();
static final Queue<String> GLOBAL_COMMIT_QUEUE = new ConcurrentLinkedQueue<>();
static final BooleanSupplier COMMIT_QUEUE_RECEIVE_ALL_DATA =
(BooleanSupplier & Serializable)
() -> COMMIT_QUEUE.size() == STREAMING_SOURCE_SEND_ELEMENTS_NUM;
static final BooleanSupplier GLOBAL_COMMIT_QUEUE_RECEIVE_ALL_DATA =
(BooleanSupplier & Serializable)
() ->
getSplittedGlobalCommittedData().size()
== STREAMING_SOURCE_SEND_ELEMENTS_NUM;
static final BooleanSupplier BOTH_QUEUE_RECEIVE_ALL_DATA =
(BooleanSupplier & Serializable)
() ->
COMMIT_QUEUE_RECEIVE_ALL_DATA.getAsBoolean()
&& GLOBAL_COMMIT_QUEUE_RECEIVE_ALL_DATA.getAsBoolean();
@Before
public void init() {
COMMIT_QUEUE.clear();
GLOBAL_COMMIT_QUEUE.clear();
}
@Test
public void writerAndCommitterAndGlobalCommitterExecuteInStreamingMode() throws Exception {
final StreamExecutionEnvironment env = buildStreamEnv();
final FiniteTestSource<Integer> source =
new FiniteTestSource<>(BOTH_QUEUE_RECEIVE_ALL_DATA, SOURCE_DATA);
env.addSource(source, IntegerTypeInfo.INT_TYPE_INFO)
.sinkTo(
TestSink.newBuilder()
.setDefaultCommitter(
(Supplier<Queue<String>> & Serializable) () -> COMMIT_QUEUE)
.setGlobalCommitter(
(Supplier<Queue<String>> & Serializable)
() -> GLOBAL_COMMIT_QUEUE)
.build());
env.execute();
GLOBAL_COMMIT_QUEUE.remove(END_OF_INPUT_STR);
assertThat(
COMMIT_QUEUE,
containsInAnyOrder(EXPECTED_COMMITTED_DATA_IN_STREAMING_MODE.toArray()));
assertThat(
getSplittedGlobalCommittedData(),
containsInAnyOrder(EXPECTED_GLOBAL_COMMITTED_DATA_IN_STREAMING_MODE.toArray()));
}
@Test
public void writerAndCommitterAndGlobalCommitterExecuteInBatchMode() throws Exception {
final StreamExecutionEnvironment env = buildBatchEnv();
env.fromCollection(SOURCE_DATA)
.sinkTo(
TestSink.newBuilder()
.setDefaultCommitter(
(Supplier<Queue<String>> & Serializable) () -> COMMIT_QUEUE)
.setGlobalCommitter(
(Supplier<Queue<String>> & Serializable)
() -> GLOBAL_COMMIT_QUEUE)
.build());
env.execute();
assertThat(
COMMIT_QUEUE, containsInAnyOrder(EXPECTED_COMMITTED_DATA_IN_BATCH_MODE.toArray()));
assertThat(
GLOBAL_COMMIT_QUEUE,
containsInAnyOrder(EXPECTED_GLOBAL_COMMITTED_DATA_IN_BATCH_MODE.toArray()));
}
@Test
public void writerAndCommitterExecuteInStreamingMode() throws Exception {
final StreamExecutionEnvironment env = buildStreamEnv();
final FiniteTestSource<Integer> source =
new FiniteTestSource<>(COMMIT_QUEUE_RECEIVE_ALL_DATA, SOURCE_DATA);
env.addSource(source, IntegerTypeInfo.INT_TYPE_INFO)
.sinkTo(
TestSink.newBuilder()
.setDefaultCommitter(
(Supplier<Queue<String>> & Serializable) () -> COMMIT_QUEUE)
.build());
env.execute();
assertThat(
COMMIT_QUEUE,
containsInAnyOrder(EXPECTED_COMMITTED_DATA_IN_STREAMING_MODE.toArray()));
}
@Test
public void writerAndCommitterExecuteInBatchMode() throws Exception {
final StreamExecutionEnvironment env = buildBatchEnv();
env.fromCollection(SOURCE_DATA)
.sinkTo(
TestSink.newBuilder()
.setDefaultCommitter(
(Supplier<Queue<String>> & Serializable) () -> COMMIT_QUEUE)
.build());
env.execute();
assertThat(
COMMIT_QUEUE, containsInAnyOrder(EXPECTED_COMMITTED_DATA_IN_BATCH_MODE.toArray()));
}
@Test
public void writerAndGlobalCommitterExecuteInStreamingMode() throws Exception {
final StreamExecutionEnvironment env = buildStreamEnv();
final FiniteTestSource<Integer> source =
new FiniteTestSource<>(GLOBAL_COMMIT_QUEUE_RECEIVE_ALL_DATA, SOURCE_DATA);
env.addSource(source, IntegerTypeInfo.INT_TYPE_INFO)
.sinkTo(
TestSink.newBuilder()
.setCommittableSerializer(
TestSink.StringCommittableSerializer.INSTANCE)
.setGlobalCommitter(
(Supplier<Queue<String>> & Serializable)
() -> GLOBAL_COMMIT_QUEUE)
.build());
env.execute();
GLOBAL_COMMIT_QUEUE.remove(END_OF_INPUT_STR);
assertThat(
getSplittedGlobalCommittedData(),
containsInAnyOrder(EXPECTED_GLOBAL_COMMITTED_DATA_IN_STREAMING_MODE.toArray()));
}
@Test
public void writerAndGlobalCommitterExecuteInBatchMode() throws Exception {
final StreamExecutionEnvironment env = buildBatchEnv();
env.fromCollection(SOURCE_DATA)
.sinkTo(
TestSink.newBuilder()
.setCommittableSerializer(
TestSink.StringCommittableSerializer.INSTANCE)
.setGlobalCommitter(
(Supplier<Queue<String>> & Serializable)
() -> GLOBAL_COMMIT_QUEUE)
.build());
env.execute();
assertThat(
GLOBAL_COMMIT_QUEUE,
containsInAnyOrder(EXPECTED_GLOBAL_COMMITTED_DATA_IN_BATCH_MODE.toArray()));
}
@Test
private void assertSinkMetrics(
long processedRecordsPerSubtask, int parallelism, int numSplits) {
List<OperatorMetricGroup> groups =
miniClusterResource.getMetricReporter().findOperatorMetricGroups("TestSink");
assertThat(groups, hasSize(parallelism));
int subtaskWithMetrics = 0;
for (OperatorMetricGroup group : groups) {
Map<String, Metric> metrics =
miniClusterResource.getMetricReporter().getMetricsByGroup(group);
if (group.getIOMetricGroup().getNumRecordsOutCounter().getCount() == 0) {
assertThat(metrics.get(MetricNames.CURRENT_SEND_TIME), nullValue());
continue;
}
subtaskWithMetrics++;
assertThat(
group.getIOMetricGroup().getNumRecordsOutCounter(),
isCounter(equalTo(processedRecordsPerSubtask)));
assertThat(
group.getIOMetricGroup().getNumBytesOutCounter(),
isCounter(
equalTo(
processedRecordsPerSubtask
* MetricWriter.RECORD_SIZE_IN_BYTES)));
assertThat(
metrics.get(MetricNames.NUM_RECORDS_OUT_ERRORS),
isCounter(equalTo(processedRecordsPerSubtask / 2)));
assertThat(
metrics.get(MetricNames.CURRENT_SEND_TIME),
isGauge(
equalTo(
(processedRecordsPerSubtask - 1)
* MetricWriter.BASE_SEND_TIME)));
}
assertThat(subtaskWithMetrics, equalTo(numSplits));
}
private static List<String> getSplittedGlobalCommittedData() {
return GLOBAL_COMMIT_QUEUE.stream()
.flatMap(x -> Arrays.stream(x.split("\\+")))
.collect(Collectors.toList());
}
private StreamExecutionEnvironment buildStreamEnv() {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setRuntimeMode(RuntimeExecutionMode.STREAMING);
env.enableCheckpointing(100);
return env;
}
private StreamExecutionEnvironment buildBatchEnv() {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setRuntimeMode(RuntimeExecutionMode.BATCH);
return env;
}
private static class MetricWriter extends TestSink.DefaultSinkWriter<Long> {
static final long BASE_SEND_TIME = 100;
static final long RECORD_SIZE_IN_BYTES = 10;
private SinkWriterMetricGroup metricGroup;
private long sendTime;
@Override
public void init(Sink.InitContext context) {
this.metricGroup = context.metricGroup();
metricGroup.setCurrentSendTimeGauge(() -> sendTime);
}
@Override
public void write(Long element, Context context) {
super.write(element, context);
sendTime = element * BASE_SEND_TIME;
if (element % 2 == 0) {
metricGroup.getNumRecordsOutErrorsCounter().inc();
}
metricGroup.getIOMetricGroup().getNumBytesOutCounter().inc(RECORD_SIZE_IN_BYTES);
}
}
} | class SinkITCase extends AbstractTestBase {
private static final Logger LOG = LoggerFactory.getLogger(SinkITCase.class);
@Rule public final SharedObjects sharedObjects = SharedObjects.create();
@Rule public final InMemoryReporterRule inMemoryReporter = InMemoryReporterRule.create();
static final List<Integer> SOURCE_DATA =
Arrays.asList(
895, 127, 148, 161, 148, 662, 822, 491, 275, 122, 850, 630, 682, 765, 434, 970,
714, 795, 288, 422);
static final int STREAMING_SOURCE_SEND_ELEMENTS_NUM = SOURCE_DATA.size() * 2;
static final List<String> EXPECTED_COMMITTED_DATA_IN_STREAMING_MODE =
SOURCE_DATA.stream()
.flatMap(
x ->
Collections.nCopies(
2, Tuple3.of(x, null, Long.MIN_VALUE).toString())
.stream())
.collect(Collectors.toList());
static final List<String> EXPECTED_COMMITTED_DATA_IN_BATCH_MODE =
SOURCE_DATA.stream()
.map(x -> Tuple3.of(x, null, Long.MIN_VALUE).toString())
.collect(Collectors.toList());
static final List<String> EXPECTED_GLOBAL_COMMITTED_DATA_IN_STREAMING_MODE =
SOURCE_DATA.stream()
.flatMap(
x ->
Collections.nCopies(
2, Tuple3.of(x, null, Long.MIN_VALUE).toString())
.stream())
.collect(Collectors.toList());
static final List<String> EXPECTED_GLOBAL_COMMITTED_DATA_IN_BATCH_MODE =
Arrays.asList(
SOURCE_DATA.stream()
.map(x -> Tuple3.of(x, null, Long.MIN_VALUE).toString())
.sorted()
.collect(joining("+")),
END_OF_INPUT_STR);
static final Queue<String> COMMIT_QUEUE = new ConcurrentLinkedQueue<>();
static final Queue<String> GLOBAL_COMMIT_QUEUE = new ConcurrentLinkedQueue<>();
static final BooleanSupplier COMMIT_QUEUE_RECEIVE_ALL_DATA =
(BooleanSupplier & Serializable)
() -> COMMIT_QUEUE.size() == STREAMING_SOURCE_SEND_ELEMENTS_NUM;
static final BooleanSupplier GLOBAL_COMMIT_QUEUE_RECEIVE_ALL_DATA =
(BooleanSupplier & Serializable)
() ->
getSplittedGlobalCommittedData().size()
== STREAMING_SOURCE_SEND_ELEMENTS_NUM;
static final BooleanSupplier BOTH_QUEUE_RECEIVE_ALL_DATA =
(BooleanSupplier & Serializable)
() ->
COMMIT_QUEUE_RECEIVE_ALL_DATA.getAsBoolean()
&& GLOBAL_COMMIT_QUEUE_RECEIVE_ALL_DATA.getAsBoolean();
@Before
public void init() {
COMMIT_QUEUE.clear();
GLOBAL_COMMIT_QUEUE.clear();
}
@Test
public void writerAndCommitterAndGlobalCommitterExecuteInStreamingMode() throws Exception {
final StreamExecutionEnvironment env = buildStreamEnv();
final FiniteTestSource<Integer> source =
new FiniteTestSource<>(BOTH_QUEUE_RECEIVE_ALL_DATA, SOURCE_DATA);
env.addSource(source, IntegerTypeInfo.INT_TYPE_INFO)
.sinkTo(
TestSink.newBuilder()
.setDefaultCommitter(
(Supplier<Queue<String>> & Serializable) () -> COMMIT_QUEUE)
.setGlobalCommitter(
(Supplier<Queue<String>> & Serializable)
() -> GLOBAL_COMMIT_QUEUE)
.build());
env.execute();
GLOBAL_COMMIT_QUEUE.remove(END_OF_INPUT_STR);
assertThat(
COMMIT_QUEUE,
containsInAnyOrder(EXPECTED_COMMITTED_DATA_IN_STREAMING_MODE.toArray()));
assertThat(
getSplittedGlobalCommittedData(),
containsInAnyOrder(EXPECTED_GLOBAL_COMMITTED_DATA_IN_STREAMING_MODE.toArray()));
}
@Test
public void writerAndCommitterAndGlobalCommitterExecuteInBatchMode() throws Exception {
final StreamExecutionEnvironment env = buildBatchEnv();
env.fromCollection(SOURCE_DATA)
.sinkTo(
TestSink.newBuilder()
.setDefaultCommitter(
(Supplier<Queue<String>> & Serializable) () -> COMMIT_QUEUE)
.setGlobalCommitter(
(Supplier<Queue<String>> & Serializable)
() -> GLOBAL_COMMIT_QUEUE)
.build());
env.execute();
assertThat(
COMMIT_QUEUE, containsInAnyOrder(EXPECTED_COMMITTED_DATA_IN_BATCH_MODE.toArray()));
assertThat(
GLOBAL_COMMIT_QUEUE,
containsInAnyOrder(EXPECTED_GLOBAL_COMMITTED_DATA_IN_BATCH_MODE.toArray()));
}
@Test
public void writerAndCommitterExecuteInStreamingMode() throws Exception {
final StreamExecutionEnvironment env = buildStreamEnv();
final FiniteTestSource<Integer> source =
new FiniteTestSource<>(COMMIT_QUEUE_RECEIVE_ALL_DATA, SOURCE_DATA);
env.addSource(source, IntegerTypeInfo.INT_TYPE_INFO)
.sinkTo(
TestSink.newBuilder()
.setDefaultCommitter(
(Supplier<Queue<String>> & Serializable) () -> COMMIT_QUEUE)
.build());
env.execute();
assertThat(
COMMIT_QUEUE,
containsInAnyOrder(EXPECTED_COMMITTED_DATA_IN_STREAMING_MODE.toArray()));
}
@Test
public void writerAndCommitterExecuteInBatchMode() throws Exception {
final StreamExecutionEnvironment env = buildBatchEnv();
env.fromCollection(SOURCE_DATA)
.sinkTo(
TestSink.newBuilder()
.setDefaultCommitter(
(Supplier<Queue<String>> & Serializable) () -> COMMIT_QUEUE)
.build());
env.execute();
assertThat(
COMMIT_QUEUE, containsInAnyOrder(EXPECTED_COMMITTED_DATA_IN_BATCH_MODE.toArray()));
}
@Test
public void writerAndGlobalCommitterExecuteInStreamingMode() throws Exception {
final StreamExecutionEnvironment env = buildStreamEnv();
final FiniteTestSource<Integer> source =
new FiniteTestSource<>(GLOBAL_COMMIT_QUEUE_RECEIVE_ALL_DATA, SOURCE_DATA);
env.addSource(source, IntegerTypeInfo.INT_TYPE_INFO)
.sinkTo(
TestSink.newBuilder()
.setCommittableSerializer(
TestSink.StringCommittableSerializer.INSTANCE)
.setGlobalCommitter(
(Supplier<Queue<String>> & Serializable)
() -> GLOBAL_COMMIT_QUEUE)
.build());
env.execute();
GLOBAL_COMMIT_QUEUE.remove(END_OF_INPUT_STR);
assertThat(
getSplittedGlobalCommittedData(),
containsInAnyOrder(EXPECTED_GLOBAL_COMMITTED_DATA_IN_STREAMING_MODE.toArray()));
}
@Test
public void writerAndGlobalCommitterExecuteInBatchMode() throws Exception {
final StreamExecutionEnvironment env = buildBatchEnv();
env.fromCollection(SOURCE_DATA)
.sinkTo(
TestSink.newBuilder()
.setCommittableSerializer(
TestSink.StringCommittableSerializer.INSTANCE)
.setGlobalCommitter(
(Supplier<Queue<String>> & Serializable)
() -> GLOBAL_COMMIT_QUEUE)
.build());
env.execute();
assertThat(
GLOBAL_COMMIT_QUEUE,
containsInAnyOrder(EXPECTED_GLOBAL_COMMITTED_DATA_IN_BATCH_MODE.toArray()));
}
@Test
private void assertSinkMetrics(
long processedRecordsPerSubtask, int parallelism, int numSplits) {
List<OperatorMetricGroup> groups =
inMemoryReporter.getReporter().findOperatorMetricGroups("TestSink");
LOG.info(
"groups: {}",
groups.stream()
.map(g -> Arrays.toString(g.getScopeComponents()))
.collect(Collectors.toList()));
int subtaskWithMetrics = 0;
for (OperatorMetricGroup group : groups) {
Map<String, Metric> metrics = inMemoryReporter.getReporter().getMetricsByGroup(group);
if (group.getIOMetricGroup().getNumRecordsOutCounter().getCount() == 0) {
continue;
}
subtaskWithMetrics++;
assertThat(
group.getIOMetricGroup().getNumRecordsOutCounter(),
isCounter(equalTo(processedRecordsPerSubtask)));
assertThat(
group.getIOMetricGroup().getNumBytesOutCounter(),
isCounter(
equalTo(
processedRecordsPerSubtask
* MetricWriter.RECORD_SIZE_IN_BYTES)));
assertThat(
metrics.get(MetricNames.NUM_RECORDS_OUT_ERRORS),
isCounter(equalTo((processedRecordsPerSubtask + 1) / 2)));
assertThat(
metrics.get(MetricNames.CURRENT_SEND_TIME),
isGauge(
equalTo(
(processedRecordsPerSubtask - 1)
* MetricWriter.BASE_SEND_TIME)));
}
assertThat(subtaskWithMetrics, equalTo(numSplits));
}
private static List<String> getSplittedGlobalCommittedData() {
return GLOBAL_COMMIT_QUEUE.stream()
.flatMap(x -> Arrays.stream(x.split("\\+")))
.collect(Collectors.toList());
}
private StreamExecutionEnvironment buildStreamEnv() {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setRuntimeMode(RuntimeExecutionMode.STREAMING);
env.enableCheckpointing(100);
return env;
}
private StreamExecutionEnvironment buildBatchEnv() {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setRuntimeMode(RuntimeExecutionMode.BATCH);
return env;
}
private static class MetricWriter extends TestSink.DefaultSinkWriter<Long> {
static final long BASE_SEND_TIME = 100;
static final long RECORD_SIZE_IN_BYTES = 10;
private SinkWriterMetricGroup metricGroup;
private long sendTime;
@Override
public void init(Sink.InitContext context) {
this.metricGroup = context.metricGroup();
metricGroup.setCurrentSendTimeGauge(() -> sendTime);
}
@Override
public void write(Long element, Context context) {
super.write(element, context);
sendTime = element * BASE_SEND_TIME;
if (element % 2 == 0) {
metricGroup.getNumRecordsOutErrorsCounter().inc();
}
metricGroup.getIOMetricGroup().getNumBytesOutCounter().inc(RECORD_SIZE_IN_BYTES);
}
}
} |
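One way to read the "generify this a bit" request above: when two metric tests differ only in the operator name and the expected counts, the shared assertions can move into a single parameterized helper. Below is a dependency-free sketch of that shape, using hypothetical interfaces rather than the Flink test utilities:

```java
// Sketch: one parameterized helper replaces two near-identical metric assertion blocks.
public final class MetricAssertionSketch {

    /** Stand-in for an operator's metric view; a real test would adapt the reporter's groups. */
    interface OperatorMetrics {
        long numRecords();
        long numBytes();
    }

    /** Shared assertion body, parameterized by operator name and expectations. */
    static void assertOperatorMetrics(
            String operatorName, OperatorMetrics metrics, long expectedRecords, long bytesPerRecord) {
        check(operatorName + " records", metrics.numRecords() == expectedRecords);
        check(operatorName + " bytes", metrics.numBytes() == expectedRecords * bytesPerRecord);
    }

    private static void check(String what, boolean ok) {
        if (!ok) {
            throw new AssertionError(what + " did not match");
        }
    }

    private static OperatorMetrics fixed(long records, long bytes) {
        return new OperatorMetrics() {
            @Override public long numRecords() { return records; }
            @Override public long numBytes() { return bytes; }
        };
    }

    public static void main(String[] args) {
        // The same helper serves what would otherwise be separate source and sink tests.
        assertOperatorMetrics("TestSource", fixed(10, 100), 10, 10);
        assertOperatorMetrics("TestSink", fixed(10, 100), 10, 10);
        System.out.println("both operator checks passed");
    }
}
```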
`jsonRow != null`? So what will happen when `jsonRow` is null? | public RowData readRecord(RowData reuse, byte[] bytes, int offset, int numBytes) throws IOException {
GenericRowData returnRecord = rowData;
if (this.getDelimiter() != null && this.getDelimiter().length == 1
&& this.getDelimiter()[0] == NEW_LINE && offset + numBytes >= 1
&& bytes[offset + numBytes - 1] == CARRIAGE_RETURN){
numBytes -= 1;
}
byte[] trimBytes = Arrays.copyOfRange(bytes, offset, offset + numBytes);
GenericRowData jsonRow = (GenericRowData) deserializationSchema.deserialize(trimBytes);
if (jsonRow != null) {
for (int i = 0; i < jsonSelectFieldNames.size(); i++) {
returnRecord.setField(jsonFieldMapping[i], jsonRow.getField(i));
}
}
emitted++;
return returnRecord;
} | if (jsonRow != null) { | public RowData readRecord(RowData reuse, byte[] bytes, int offset, int numBytes) throws IOException {
if (this.getDelimiter() != null && this.getDelimiter().length == 1
&& this.getDelimiter()[0] == NEW_LINE && offset + numBytes >= 1
&& bytes[offset + numBytes - 1] == CARRIAGE_RETURN) {
numBytes -= 1;
}
byte[] trimBytes = Arrays.copyOfRange(bytes, offset, offset + numBytes);
GenericRowData jsonRow = (GenericRowData) deserializationSchema.deserialize(trimBytes);
if (jsonRow == null) {
return null;
}
GenericRowData returnRecord = rowData;
for (int i = 0; i < jsonSelectFieldToJsonFieldMapping.length; i++) {
returnRecord.setField(jsonSelectFieldToProjectFieldMapping[i],
jsonRow.getField(jsonSelectFieldToJsonFieldMapping[i]));
}
emitted++;
return returnRecord;
} | class JsonInputFormat extends DelimitedInputFormat<RowData> {
/**
* Code of \r, used to remove \r from a line when the line ends with \r\n.
*/
private static final byte CARRIAGE_RETURN = (byte) '\r';
/**
* Code of \n, used to identify if \n is used as delimiter.
*/
private static final byte NEW_LINE = (byte) '\n';
private final List<DataType> fieldTypes;
private final List<String> fieldNames;
private final int[] selectFields;
private final List<String> partitionKeys;
private final String defaultPartValue;
private final long limit;
private final List<String> jsonSelectFieldNames;
private final int[] jsonFieldMapping;
private final JsonRowDataDeserializationSchema deserializationSchema;
private transient boolean end;
private transient long emitted;
private transient GenericRowData rowData;
public JsonInputFormat(
Path[] filePaths,
List<DataType> fieldTypes,
List<String> fieldNames,
int[] selectFields,
List<String> partitionKeys,
String defaultPartValue,
long limit,
List<String> jsonSelectFieldNames,
int[] jsonFieldMapping,
JsonRowDataDeserializationSchema deserializationSchema) {
super.setFilePaths(filePaths);
this.fieldTypes = fieldTypes;
this.fieldNames = fieldNames;
this.selectFields = selectFields;
this.partitionKeys = partitionKeys;
this.defaultPartValue = defaultPartValue;
this.limit = limit;
this.jsonSelectFieldNames = jsonSelectFieldNames;
this.jsonFieldMapping = jsonFieldMapping;
this.deserializationSchema = deserializationSchema;
}
@Override
public boolean supportsMultiPaths() {
return true;
}
@Override
public void open(FileInputSplit split) throws IOException {
super.open(split);
this.end = false;
this.emitted = 0L;
this.rowData = PartitionPathUtils.fillPartitionValueForRecord(fieldNames, fieldTypes, selectFields,
partitionKeys, currentSplit.getPath(), defaultPartValue);
}
@Override
public boolean reachedEnd() {
return emitted >= limit || end;
}
@Override
} | class JsonInputFormat extends DelimitedInputFormat<RowData> {
/**
* Code of \r, used to remove \r from a line when the line ends with \r\n.
*/
private static final byte CARRIAGE_RETURN = (byte) '\r';
/**
* Code of \n, used to identify if \n is used as delimiter.
*/
private static final byte NEW_LINE = (byte) '\n';
private final DataType[] fieldTypes;
private final String[] fieldNames;
private final int[] selectFields;
private final List<String> partitionKeys;
private final String defaultPartValue;
private final long limit;
private final int[] jsonSelectFieldToProjectFieldMapping;
private final int[] jsonSelectFieldToJsonFieldMapping;
private final JsonRowDataDeserializationSchema deserializationSchema;
private transient boolean end;
private transient long emitted;
private transient GenericRowData rowData;
public JsonInputFormat(
Path[] filePaths,
DataType[] fieldTypes,
String[] fieldNames,
int[] selectFields,
List<String> partitionKeys,
String defaultPartValue,
long limit,
int[] jsonSelectFieldToProjectFieldMapping,
int[] jsonSelectFieldToJsonFieldMapping,
JsonRowDataDeserializationSchema deserializationSchema) {
super.setFilePaths(filePaths);
this.fieldTypes = fieldTypes;
this.fieldNames = fieldNames;
this.selectFields = selectFields;
this.partitionKeys = partitionKeys;
this.defaultPartValue = defaultPartValue;
this.limit = limit;
this.jsonSelectFieldToProjectFieldMapping = jsonSelectFieldToProjectFieldMapping;
this.jsonSelectFieldToJsonFieldMapping = jsonSelectFieldToJsonFieldMapping;
this.deserializationSchema = deserializationSchema;
}
@Override
public boolean supportsMultiPaths() {
return true;
}
@Override
public void open(FileInputSplit split) throws IOException {
super.open(split);
this.end = false;
this.emitted = 0L;
this.rowData = PartitionPathUtils.fillPartitionValueForRecord(fieldNames, fieldTypes, selectFields,
partitionKeys, currentSplit.getPath(), defaultPartValue);
}
@Override
public boolean reachedEnd() {
return emitted >= limit || end;
}
@Override
@Override
public RowData nextRecord(RowData record) throws IOException {
while (true) {
if (readLine()) {
RowData row = readRecord(record, this.currBuffer, this.currOffset, this.currLen);
if (row == null) {
continue;
} else {
return row;
}
} else {
this.end = true;
return null;
}
}
}
} |
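The change above answers the question in the review comment: when deserialization returns null, readRecord now returns null and nextRecord keeps reading instead of re-emitting the reused row. The same skip-on-null loop, reduced to a self-contained sketch with hypothetical types:

```java
import java.util.ArrayDeque;
import java.util.Deque;

// Sketch of the skip-on-null pattern: drop lines that cannot be decoded
// rather than re-emitting the previously reused record.
public final class SkippingReaderSketch {

    private final Deque<String> lines = new ArrayDeque<>();

    SkippingReaderSketch(String... input) {
        for (String line : input) {
            lines.add(line);
        }
    }

    /** Returns null when a line cannot be decoded (here: non-numeric text). */
    private Integer decode(String line) {
        try {
            return Integer.parseInt(line.trim());
        } catch (NumberFormatException e) {
            return null;
        }
    }

    /** Skips undecodable lines; returns null only once the input is exhausted. */
    Integer nextRecord() {
        while (!lines.isEmpty()) {
            Integer value = decode(lines.poll());
            if (value != null) {
                return value; // good record
            }
            // null means "could not decode" -> continue with the next line
        }
        return null; // end of input
    }

    public static void main(String[] args) {
        SkippingReaderSketch reader = new SkippingReaderSketch("1", "oops", "2");
        Integer record;
        while ((record = reader.nextRecord()) != null) {
            System.out.println(record); // prints 1 then 2; the bad line is skipped
        }
    }
}
```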
[Optional] you can write something like: ``` return configContext.provider.refreshTokens(refreshToken) .plug(u -> { if (! BlockingOperationControl.isBlockingAllowed()) { return u.runSubscriptionOn(resolver.getBlockingExecutor()); } return u; }); ``` | private Uni<AuthorizationCodeTokens> refreshTokensUni(TenantConfigContext configContext, String refreshToken) {
if (BlockingOperationControl.isBlockingAllowed()) {
return configContext.provider.refreshTokens(refreshToken);
}
return configContext.provider.refreshTokens(refreshToken)
.runSubscriptionOn(resolver.getBlockingExecutor());
} | return configContext.provider.refreshTokens(refreshToken); | private Uni<AuthorizationCodeTokens> refreshTokensUni(TenantConfigContext configContext, String refreshToken) {
return configContext.provider.refreshTokens(refreshToken).plug(u -> {
if (!BlockingOperationControl.isBlockingAllowed()) {
return u.runSubscriptionOn(resolver.getBlockingExecutor());
}
return u;
});
} | class CodeAuthenticationMechanism extends AbstractOidcAuthenticationMechanism {
static final String AMP = "&";
static final String EQ = "=";
static final String COOKIE_DELIM = "|";
static final Pattern COOKIE_PATTERN = Pattern.compile("\\" + COOKIE_DELIM);
static final String SESSION_COOKIE_NAME = "q_session";
static final String SESSION_MAX_AGE_PARAM = "session-max-age";
private static final Logger LOG = Logger.getLogger(CodeAuthenticationMechanism.class);
private static final String STATE_COOKIE_NAME = "q_auth";
private static final String POST_LOGOUT_COOKIE_NAME = "q_post_logout";
private static QuarkusSecurityIdentity augmentIdentity(SecurityIdentity securityIdentity,
String accessToken,
String refreshToken,
RoutingContext context) {
IdTokenCredential idTokenCredential = securityIdentity.getCredential(IdTokenCredential.class);
RefreshToken refreshTokenCredential = new RefreshToken(refreshToken);
return QuarkusSecurityIdentity.builder()
.setPrincipal(securityIdentity.getPrincipal())
.addCredential(idTokenCredential)
.addCredential(new AccessTokenCredential(accessToken, refreshTokenCredential, context))
.addCredential(refreshTokenCredential)
.addRoles(securityIdentity.getRoles())
.addAttributes(securityIdentity.getAttributes())
.addPermissionChecker(new Function<Permission, Uni<Boolean>>() {
@Override
public Uni<Boolean> apply(Permission permission) {
return securityIdentity.checkPermission(permission);
}
}).build();
}
public Uni<SecurityIdentity> authenticate(RoutingContext context,
IdentityProviderManager identityProviderManager) {
final Cookie sessionCookie = context.request().getCookie(getSessionCookieName(resolver.resolveConfig(context)));
if (sessionCookie != null) {
Uni<TenantConfigContext> resolvedContext = resolver.resolveContext(context);
return resolvedContext.onItem()
.transformToUni(new Function<TenantConfigContext, Uni<? extends SecurityIdentity>>() {
@Override
public Uni<SecurityIdentity> apply(TenantConfigContext tenantContext) {
return reAuthenticate(sessionCookie, context, identityProviderManager, tenantContext);
}
});
}
final String code = context.request().getParam("code");
if (code == null) {
return Uni.createFrom().optional(Optional.empty());
}
Uni<TenantConfigContext> resolvedContext = resolver.resolveContext(context);
return resolvedContext.onItem().transformToUni(new Function<TenantConfigContext, Uni<? extends SecurityIdentity>>() {
@Override
public Uni<SecurityIdentity> apply(TenantConfigContext tenantContext) {
return performCodeFlow(identityProviderManager, context, tenantContext, code);
}
});
}
private Uni<SecurityIdentity> reAuthenticate(Cookie sessionCookie,
RoutingContext context,
IdentityProviderManager identityProviderManager,
TenantConfigContext configContext) {
AuthorizationCodeTokens session = resolver.getTokenStateManager().getTokens(context, configContext.oidcConfig,
sessionCookie.getValue());
context.put(OidcConstants.ACCESS_TOKEN_VALUE, session.getAccessToken());
return authenticate(identityProviderManager, new IdTokenCredential(session.getIdToken(), context))
.map(new Function<SecurityIdentity, SecurityIdentity>() {
@Override
public SecurityIdentity apply(SecurityIdentity identity) {
if (isLogout(context, configContext)) {
fireEvent(SecurityEvent.Type.OIDC_LOGOUT_RP_INITIATED, identity);
throw redirectToLogoutEndpoint(context, configContext, session.getIdToken());
}
return augmentIdentity(identity, session.getAccessToken(), session.getRefreshToken(), context);
}
}).onFailure().recoverWithUni(new Function<Throwable, Uni<? extends SecurityIdentity>>() {
@Override
public Uni<? extends SecurityIdentity> apply(Throwable t) {
if (t instanceof AuthenticationRedirectException) {
throw (AuthenticationRedirectException) t;
}
if (!(t instanceof TokenAutoRefreshException)) {
boolean expired = (t.getCause() instanceof InvalidJwtException)
&& ((InvalidJwtException) t.getCause()).hasErrorCode(ErrorCodes.EXPIRED);
if (!expired) {
LOG.debugf("Authentication failure: %s", t.getCause());
throw new AuthenticationCompletionException(t.getCause());
}
if (!configContext.oidcConfig.token.refreshExpired) {
LOG.debug("Token has expired, token refresh is not allowed");
throw new AuthenticationCompletionException(t.getCause());
}
LOG.debug("Token has expired, trying to refresh it");
return refreshSecurityIdentity(configContext, session.getRefreshToken(), context,
identityProviderManager, false, null);
} else {
return refreshSecurityIdentity(configContext, session.getRefreshToken(), context,
identityProviderManager, true,
((TokenAutoRefreshException) t).getSecurityIdentity());
}
}
});
}
private boolean isJavaScript(RoutingContext context) {
String value = context.request().getHeader("X-Requested-With");
return "JavaScript".equals(value) || "XMLHttpRequest".equals(value);
}
private boolean shouldAutoRedirect(TenantConfigContext configContext, RoutingContext context) {
return isJavaScript(context) ? configContext.oidcConfig.authentication.javaScriptAutoRedirect : true;
}
public Uni<ChallengeData> getChallenge(RoutingContext context) {
Uni<TenantConfigContext> tenantContext = resolver.resolveContext(context);
return tenantContext.onItem().transformToUni(new Function<TenantConfigContext, Uni<? extends ChallengeData>>() {
@Override
public Uni<ChallengeData> apply(TenantConfigContext tenantContext) {
return getChallengeInternal(context, tenantContext);
}
});
}
public Uni<ChallengeData> getChallengeInternal(RoutingContext context, TenantConfigContext configContext) {
removeCookie(context, configContext, getSessionCookieName(configContext.oidcConfig));
if (!shouldAutoRedirect(configContext, context)) {
return Uni.createFrom().item(new ChallengeData(499, "WWW-Authenticate", "OIDC"));
}
StringBuilder codeFlowParams = new StringBuilder();
codeFlowParams.append(OidcConstants.CODE_FLOW_RESPONSE_TYPE).append(EQ).append(OidcConstants.CODE_FLOW_CODE);
codeFlowParams.append(AMP).append(OidcConstants.CLIENT_ID).append(EQ)
.append(urlEncode(configContext.oidcConfig.clientId.get()));
List<String> scopes = new ArrayList<>();
scopes.add("openid");
configContext.oidcConfig.getAuthentication().scopes.ifPresent(scopes::addAll);
codeFlowParams.append(AMP).append(OidcConstants.TOKEN_SCOPE).append(EQ).append(urlEncode(String.join(" ", scopes)));
String redirectPath = getRedirectPath(configContext, context);
String redirectUriParam = buildUri(context, isForceHttps(configContext), redirectPath);
LOG.debugf("Authentication request redirect_uri parameter: %s", redirectUriParam);
codeFlowParams.append(AMP).append(OidcConstants.CODE_FLOW_REDIRECT_URI).append(EQ).append(urlEncode(redirectUriParam));
codeFlowParams.append(AMP).append(OidcConstants.CODE_FLOW_STATE).append(EQ)
.append(generateCodeFlowState(context, configContext, redirectPath));
if (configContext.oidcConfig.authentication.getExtraParams() != null) {
for (Map.Entry<String, String> entry : configContext.oidcConfig.authentication.getExtraParams().entrySet()) {
codeFlowParams.append(AMP).append(entry.getKey()).append(EQ).append(urlEncode(entry.getValue()));
}
}
String authorizationURL = configContext.provider.getMetadata().getAuthorizationUri() + "?" + codeFlowParams.toString();
return Uni.createFrom().item(new ChallengeData(HttpResponseStatus.FOUND.code(), HttpHeaders.LOCATION,
authorizationURL));
}
private static String urlEncode(String value) {
try {
return URLEncoder.encode(value, StandardCharsets.UTF_8.name());
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
private Uni<SecurityIdentity> performCodeFlow(IdentityProviderManager identityProviderManager,
RoutingContext context, TenantConfigContext configContext, String code) {
Cookie stateCookie = context.getCookie(getStateCookieName(configContext));
String userPath = null;
String userQuery = null;
if (stateCookie != null) {
List<String> values = context.queryParam("state");
if (values.size() != 1) {
LOG.debug("State parameter can not be empty or multi-valued");
return Uni.createFrom().failure(new AuthenticationCompletionException());
} else if (!stateCookie.getValue().startsWith(values.get(0))) {
LOG.debug("State cookie value does not match the state query parameter value");
return Uni.createFrom().failure(new AuthenticationCompletionException());
} else {
String[] pair = COOKIE_PATTERN.split(stateCookie.getValue());
if (pair.length == 2) {
int userQueryIndex = pair[1].indexOf("?");
if (userQueryIndex >= 0) {
userPath = pair[1].substring(0, userQueryIndex);
if (userQueryIndex + 1 < pair[1].length()) {
userQuery = pair[1].substring(userQueryIndex + 1);
}
} else {
userPath = pair[1];
}
}
removeCookie(context, configContext, getStateCookieName(configContext));
}
} else {
LOG.debug("The state cookie is missing after a redirect from IDP, authentication has failed");
return Uni.createFrom().failure(new AuthenticationCompletionException());
}
final String finalUserPath = userPath;
final String finalUserQuery = userQuery;
Uni<AuthorizationCodeTokens> codeFlowTokensUni = getCodeFlowTokensUni(context, configContext, code);
return codeFlowTokensUni
.onItemOrFailure()
.transformToUni(new BiFunction<AuthorizationCodeTokens, Throwable, Uni<? extends SecurityIdentity>>() {
@Override
public Uni<SecurityIdentity> apply(final AuthorizationCodeTokens tokens, final Throwable tOuter) {
if (tOuter != null) {
LOG.debugf("Exception during the code to token exchange: %s", tOuter.getMessage());
return Uni.createFrom().failure(new AuthenticationCompletionException(tOuter));
}
context.put(NEW_AUTHENTICATION, Boolean.TRUE);
context.put(OidcConstants.ACCESS_TOKEN_VALUE, tokens.getAccessToken());
return authenticate(identityProviderManager, new IdTokenCredential(tokens.getIdToken(), context))
.map(new Function<SecurityIdentity, SecurityIdentity>() {
@Override
public SecurityIdentity apply(SecurityIdentity identity) {
processSuccessfulAuthentication(context, configContext,
tokens, identity);
boolean removeRedirectParams = configContext.oidcConfig.authentication
.isRemoveRedirectParameters();
if (removeRedirectParams || finalUserPath != null
|| finalUserQuery != null) {
URI absoluteUri = URI.create(context.request().absoluteURI());
StringBuilder finalUriWithoutQuery = new StringBuilder(buildUri(context,
isForceHttps(configContext),
absoluteUri.getAuthority(),
(finalUserPath != null ? finalUserPath
: absoluteUri.getRawPath())));
if (!removeRedirectParams) {
finalUriWithoutQuery.append('?').append(absoluteUri.getRawQuery());
}
if (finalUserQuery != null) {
finalUriWithoutQuery.append(!removeRedirectParams ? "" : "?");
finalUriWithoutQuery.append(finalUserQuery);
}
String finalRedirectUri = finalUriWithoutQuery.toString();
LOG.debugf("Final redirect URI: %s", finalRedirectUri);
throw new AuthenticationRedirectException(finalRedirectUri);
} else {
return augmentIdentity(identity, tokens.getAccessToken(),
tokens.getRefreshToken(), context);
}
}
}).onFailure().transform(new Function<Throwable, Throwable>() {
@Override
public Throwable apply(Throwable tInner) {
if (tInner instanceof AuthenticationRedirectException) {
return tInner;
}
return new AuthenticationCompletionException(tInner);
}
});
}
});
}
private void processSuccessfulAuthentication(RoutingContext context,
TenantConfigContext configContext,
AuthorizationCodeTokens tokens,
SecurityIdentity securityIdentity) {
removeCookie(context, configContext, getSessionCookieName(configContext.oidcConfig));
JsonObject idToken = OidcUtils.decodeJwtContent(tokens.getIdToken());
if (!idToken.containsKey("exp") || !idToken.containsKey("iat")) {
LOG.debug("ID Token is required to contain 'exp' and 'iat' claims");
throw new AuthenticationCompletionException();
}
long maxAge = idToken.getLong("exp") - idToken.getLong("iat");
if (configContext.oidcConfig.token.lifespanGrace.isPresent()) {
maxAge += configContext.oidcConfig.token.lifespanGrace.getAsInt();
}
if (configContext.oidcConfig.token.refreshExpired) {
maxAge += configContext.oidcConfig.authentication.sessionAgeExtension.getSeconds();
}
context.put(SESSION_MAX_AGE_PARAM, maxAge);
String cookieValue = resolver.getTokenStateManager()
.createTokenState(context, configContext.oidcConfig, tokens);
createCookie(context, configContext.oidcConfig, getSessionCookieName(configContext.oidcConfig), cookieValue, maxAge);
fireEvent(SecurityEvent.Type.OIDC_LOGIN, securityIdentity);
}
private void fireEvent(SecurityEvent.Type eventType, SecurityIdentity securityIdentity) {
if (resolver.isSecurityEventObserved()) {
resolver.getSecurityEvent().fire(new SecurityEvent(eventType, securityIdentity));
}
}
private String getRedirectPath(TenantConfigContext configContext, RoutingContext context) {
Authentication auth = configContext.oidcConfig.getAuthentication();
return auth.getRedirectPath().isPresent() ? auth.getRedirectPath().get() : context.request().path();
}
private String generateCodeFlowState(RoutingContext context, TenantConfigContext configContext,
String redirectPath) {
String uuid = UUID.randomUUID().toString();
String cookieValue = uuid;
Authentication auth = configContext.oidcConfig.getAuthentication();
if (auth.isRestorePathAfterRedirect()) {
String requestQuery = context.request().query();
String requestPath = !redirectPath.equals(context.request().path()) || requestQuery != null
? context.request().path()
: "";
if (requestQuery != null) {
requestPath += ("?" + requestQuery);
}
if (!requestPath.isEmpty()) {
cookieValue += (COOKIE_DELIM + requestPath);
}
}
createCookie(context, configContext.oidcConfig, getStateCookieName(configContext), cookieValue, 60 * 30);
return uuid;
}
private String generatePostLogoutState(RoutingContext context, TenantConfigContext configContext) {
removeCookie(context, configContext, getPostLogoutCookieName(configContext));
return createCookie(context, configContext.oidcConfig, getPostLogoutCookieName(configContext),
UUID.randomUUID().toString(),
60 * 30).getValue();
}
static ServerCookie createCookie(RoutingContext context, OidcTenantConfig oidcConfig,
String name, String value, long maxAge) {
ServerCookie cookie = new CookieImpl(name, value);
cookie.setHttpOnly(true);
cookie.setSecure(oidcConfig.authentication.cookieForceSecure || context.request().isSSL());
cookie.setMaxAge(maxAge);
LOG.debugf(name + " cookie 'max-age' parameter is set to %d", maxAge);
Authentication auth = oidcConfig.getAuthentication();
setCookiePath(context, auth, cookie);
if (auth.cookieDomain.isPresent()) {
cookie.setDomain(auth.getCookieDomain().get());
}
context.response().addCookie(cookie);
return cookie;
}
static void setCookiePath(RoutingContext context, Authentication auth, ServerCookie cookie) {
if (auth.cookiePathHeader.isPresent() && context.request().headers().contains(auth.cookiePathHeader.get())) {
cookie.setPath(context.request().getHeader(auth.cookiePathHeader.get()));
} else if (auth.cookiePath.isPresent()) {
cookie.setPath(auth.getCookiePath().get());
}
}
private String buildUri(RoutingContext context, boolean forceHttps, String path) {
String authority = URI.create(context.request().absoluteURI()).getAuthority();
return buildUri(context, forceHttps, authority, path);
}
private String buildUri(RoutingContext context, boolean forceHttps, String authority, String path) {
final String scheme = forceHttps ? "https" : context.request().scheme();
String forwardedPrefix = "";
if (resolver.isEnableHttpForwardedPrefix()) {
String forwardedPrefixHeader = context.request().getHeader("X-Forwarded-Prefix");
if (forwardedPrefixHeader != null && !forwardedPrefixHeader.equals("/") && !forwardedPrefixHeader.equals("//")) {
forwardedPrefix = forwardedPrefixHeader;
if (forwardedPrefix.endsWith("/")) {
forwardedPrefix = forwardedPrefix.substring(0, forwardedPrefix.length() - 1);
}
}
}
return new StringBuilder(scheme).append("://")
.append(authority)
.append(forwardedPrefix)
.append(path)
.toString();
}
private void removeCookie(RoutingContext context, TenantConfigContext configContext, String cookieName) {
ServerCookie cookie = (ServerCookie) context.cookieMap().get(cookieName);
if (cookie != null) {
if (SESSION_COOKIE_NAME.equals(cookieName)) {
resolver.getTokenStateManager().deleteTokens(context, configContext.oidcConfig, cookie.getValue());
}
removeCookie(context, cookie, configContext.oidcConfig);
}
}
static void removeCookie(RoutingContext context, ServerCookie cookie, OidcTenantConfig oidcConfig) {
if (cookie != null) {
cookie.setValue("");
cookie.setMaxAge(0);
Authentication auth = oidcConfig.getAuthentication();
setCookiePath(context, auth, cookie);
if (auth.cookieDomain.isPresent()) {
cookie.setDomain(auth.cookieDomain.get());
}
}
}
private boolean isLogout(RoutingContext context, TenantConfigContext configContext) {
Optional<String> logoutPath = configContext.oidcConfig.logout.path;
if (logoutPath.isPresent()) {
return context.request().absoluteURI().equals(
buildUri(context, false, logoutPath.get()));
}
return false;
}
private Uni<SecurityIdentity> refreshSecurityIdentity(TenantConfigContext configContext, String refreshToken,
RoutingContext context, IdentityProviderManager identityProviderManager, boolean autoRefresh,
SecurityIdentity fallback) {
Uni<AuthorizationCodeTokens> refreshedTokensUni = refreshTokensUni(configContext, refreshToken);
return refreshedTokensUni
.onItemOrFailure()
.transformToUni(new BiFunction<AuthorizationCodeTokens, Throwable, Uni<? extends SecurityIdentity>>() {
@Override
public Uni<SecurityIdentity> apply(final AuthorizationCodeTokens tokens, final Throwable t) {
return Uni.createFrom().emitter(new Consumer<UniEmitter<? super SecurityIdentity>>() {
@Override
public void accept(UniEmitter<? super SecurityIdentity> emitter) {
if (t != null) {
LOG.debugf("ID token refresh has failed: %s", t.getMessage());
if (autoRefresh) {
LOG.debug("Using the current SecurityIdentity since the ID token is still valid");
emitter.complete(((TokenAutoRefreshException) t).getSecurityIdentity());
} else {
emitter.fail(new AuthenticationFailedException(t));
}
} else {
context.put(OidcConstants.ACCESS_TOKEN_VALUE, tokens.getAccessToken());
context.put(REFRESH_TOKEN_GRANT_RESPONSE, Boolean.TRUE);
authenticate(identityProviderManager,
new IdTokenCredential(tokens.getIdToken(), context))
.subscribe().with(new Consumer<SecurityIdentity>() {
@Override
public void accept(SecurityIdentity identity) {
processSuccessfulAuthentication(context, configContext,
tokens, identity);
SecurityIdentity newSecurityIdentity = augmentIdentity(identity,
tokens.getAccessToken(), tokens.getRefreshToken(), context);
fireEvent(autoRefresh ? SecurityEvent.Type.OIDC_SESSION_REFRESHED
: SecurityEvent.Type.OIDC_SESSION_EXPIRED_AND_REFRESHED,
newSecurityIdentity);
emitter.complete(newSecurityIdentity);
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) {
emitter.fail(new AuthenticationFailedException(throwable));
}
});
}
}
});
}
});
}
private Uni<AuthorizationCodeTokens> getCodeFlowTokensUni(RoutingContext context, TenantConfigContext configContext,
String code) {
String redirectPath = getRedirectPath(configContext, context);
String redirectUriParam = buildUri(context, isForceHttps(configContext), redirectPath);
LOG.debugf("Token request redirect_uri parameter: %s", redirectUriParam);
if (BlockingOperationControl.isBlockingAllowed()) {
return configContext.provider.getCodeFlowTokens(code, redirectUriParam);
}
return configContext.provider.getCodeFlowTokens(code, redirectUriParam)
.runSubscriptionOn(resolver.getBlockingExecutor());
}
private String buildLogoutRedirectUri(TenantConfigContext configContext, String idToken, RoutingContext context) {
String logoutPath = configContext.provider.getMetadata().getEndSessionUri();
StringBuilder logoutUri = new StringBuilder(logoutPath).append("?").append("id_token_hint=").append(idToken);
if (configContext.oidcConfig.logout.postLogoutPath.isPresent()) {
logoutUri.append("&post_logout_redirect_uri=").append(
buildUri(context, isForceHttps(configContext), configContext.oidcConfig.logout.postLogoutPath.get()));
logoutUri.append("&state=").append(generatePostLogoutState(context, configContext));
}
return logoutUri.toString();
}
private boolean isForceHttps(TenantConfigContext configContext) {
return configContext.oidcConfig.authentication.forceRedirectHttpsScheme;
}
private AuthenticationRedirectException redirectToLogoutEndpoint(RoutingContext context, TenantConfigContext configContext,
String idToken) {
removeCookie(context, configContext, getSessionCookieName(configContext.oidcConfig));
return new AuthenticationRedirectException(buildLogoutRedirectUri(configContext, idToken, context));
}
private static String getStateCookieName(TenantConfigContext configContext) {
String cookieSuffix = getCookieSuffix(configContext.oidcConfig.tenantId.get());
return STATE_COOKIE_NAME + cookieSuffix;
}
private static String getPostLogoutCookieName(TenantConfigContext configContext) {
String cookieSuffix = getCookieSuffix(configContext.oidcConfig.tenantId.get());
return POST_LOGOUT_COOKIE_NAME + cookieSuffix;
}
private static String getSessionCookieName(OidcTenantConfig oidcConfig) {
String cookieSuffix = getCookieSuffix(oidcConfig.tenantId.get());
return SESSION_COOKIE_NAME + cookieSuffix;
}
static String getCookieSuffix(String tenantId) {
return !"Default".equals(tenantId) ? "_" + tenantId : "";
}
} | class CodeAuthenticationMechanism extends AbstractOidcAuthenticationMechanism {
static final String AMP = "&";
static final String EQ = "=";
static final String COOKIE_DELIM = "|";
static final Pattern COOKIE_PATTERN = Pattern.compile("\\" + COOKIE_DELIM);
static final String SESSION_COOKIE_NAME = "q_session";
static final String SESSION_MAX_AGE_PARAM = "session-max-age";
private static final Logger LOG = Logger.getLogger(CodeAuthenticationMechanism.class);
private static final String STATE_COOKIE_NAME = "q_auth";
private static final String POST_LOGOUT_COOKIE_NAME = "q_post_logout";
private static QuarkusSecurityIdentity augmentIdentity(SecurityIdentity securityIdentity,
String accessToken,
String refreshToken,
RoutingContext context) {
IdTokenCredential idTokenCredential = securityIdentity.getCredential(IdTokenCredential.class);
RefreshToken refreshTokenCredential = new RefreshToken(refreshToken);
return QuarkusSecurityIdentity.builder()
.setPrincipal(securityIdentity.getPrincipal())
.addCredential(idTokenCredential)
.addCredential(new AccessTokenCredential(accessToken, refreshTokenCredential, context))
.addCredential(refreshTokenCredential)
.addRoles(securityIdentity.getRoles())
.addAttributes(securityIdentity.getAttributes())
.addPermissionChecker(new Function<Permission, Uni<Boolean>>() {
@Override
public Uni<Boolean> apply(Permission permission) {
return securityIdentity.checkPermission(permission);
}
}).build();
}
public Uni<SecurityIdentity> authenticate(RoutingContext context,
IdentityProviderManager identityProviderManager) {
final Cookie sessionCookie = context.request().getCookie(getSessionCookieName(resolver.resolveConfig(context)));
if (sessionCookie != null) {
Uni<TenantConfigContext> resolvedContext = resolver.resolveContext(context);
return resolvedContext.onItem()
.transformToUni(new Function<TenantConfigContext, Uni<? extends SecurityIdentity>>() {
@Override
public Uni<SecurityIdentity> apply(TenantConfigContext tenantContext) {
return reAuthenticate(sessionCookie, context, identityProviderManager, tenantContext);
}
});
}
final String code = context.request().getParam("code");
if (code == null) {
return Uni.createFrom().optional(Optional.empty());
}
Uni<TenantConfigContext> resolvedContext = resolver.resolveContext(context);
return resolvedContext.onItem().transformToUni(new Function<TenantConfigContext, Uni<? extends SecurityIdentity>>() {
@Override
public Uni<SecurityIdentity> apply(TenantConfigContext tenantContext) {
return performCodeFlow(identityProviderManager, context, tenantContext, code);
}
});
}
private Uni<SecurityIdentity> reAuthenticate(Cookie sessionCookie,
RoutingContext context,
IdentityProviderManager identityProviderManager,
TenantConfigContext configContext) {
AuthorizationCodeTokens session = resolver.getTokenStateManager().getTokens(context, configContext.oidcConfig,
sessionCookie.getValue());
context.put(OidcConstants.ACCESS_TOKEN_VALUE, session.getAccessToken());
return authenticate(identityProviderManager, new IdTokenCredential(session.getIdToken(), context))
.map(new Function<SecurityIdentity, SecurityIdentity>() {
@Override
public SecurityIdentity apply(SecurityIdentity identity) {
if (isLogout(context, configContext)) {
fireEvent(SecurityEvent.Type.OIDC_LOGOUT_RP_INITIATED, identity);
throw redirectToLogoutEndpoint(context, configContext, session.getIdToken());
}
return augmentIdentity(identity, session.getAccessToken(), session.getRefreshToken(), context);
}
}).onFailure().recoverWithUni(new Function<Throwable, Uni<? extends SecurityIdentity>>() {
@Override
public Uni<? extends SecurityIdentity> apply(Throwable t) {
if (t instanceof AuthenticationRedirectException) {
throw (AuthenticationRedirectException) t;
}
if (!(t instanceof TokenAutoRefreshException)) {
boolean expired = (t.getCause() instanceof InvalidJwtException)
&& ((InvalidJwtException) t.getCause()).hasErrorCode(ErrorCodes.EXPIRED);
if (!expired) {
LOG.debugf("Authentication failure: %s", t.getCause());
throw new AuthenticationCompletionException(t.getCause());
}
if (!configContext.oidcConfig.token.refreshExpired) {
LOG.debug("Token has expired, token refresh is not allowed");
throw new AuthenticationCompletionException(t.getCause());
}
LOG.debug("Token has expired, trying to refresh it");
return refreshSecurityIdentity(configContext, session.getRefreshToken(), context,
identityProviderManager, false, null);
} else {
return refreshSecurityIdentity(configContext, session.getRefreshToken(), context,
identityProviderManager, true,
((TokenAutoRefreshException) t).getSecurityIdentity());
}
}
});
}
private boolean isJavaScript(RoutingContext context) {
String value = context.request().getHeader("X-Requested-With");
return "JavaScript".equals(value) || "XMLHttpRequest".equals(value);
}
private boolean shouldAutoRedirect(TenantConfigContext configContext, RoutingContext context) {
return isJavaScript(context) ? configContext.oidcConfig.authentication.javaScriptAutoRedirect : true;
}
public Uni<ChallengeData> getChallenge(RoutingContext context) {
Uni<TenantConfigContext> tenantContext = resolver.resolveContext(context);
return tenantContext.onItem().transformToUni(new Function<TenantConfigContext, Uni<? extends ChallengeData>>() {
@Override
public Uni<ChallengeData> apply(TenantConfigContext tenantContext) {
return getChallengeInternal(context, tenantContext);
}
});
}
public Uni<ChallengeData> getChallengeInternal(RoutingContext context, TenantConfigContext configContext) {
removeCookie(context, configContext, getSessionCookieName(configContext.oidcConfig));
if (!shouldAutoRedirect(configContext, context)) {
return Uni.createFrom().item(new ChallengeData(499, "WWW-Authenticate", "OIDC"));
}
StringBuilder codeFlowParams = new StringBuilder();
codeFlowParams.append(OidcConstants.CODE_FLOW_RESPONSE_TYPE).append(EQ).append(OidcConstants.CODE_FLOW_CODE);
codeFlowParams.append(AMP).append(OidcConstants.CLIENT_ID).append(EQ)
.append(urlEncode(configContext.oidcConfig.clientId.get()));
List<String> scopes = new ArrayList<>();
scopes.add("openid");
configContext.oidcConfig.getAuthentication().scopes.ifPresent(scopes::addAll);
codeFlowParams.append(AMP).append(OidcConstants.TOKEN_SCOPE).append(EQ).append(urlEncode(String.join(" ", scopes)));
String redirectPath = getRedirectPath(configContext, context);
String redirectUriParam = buildUri(context, isForceHttps(configContext), redirectPath);
LOG.debugf("Authentication request redirect_uri parameter: %s", redirectUriParam);
codeFlowParams.append(AMP).append(OidcConstants.CODE_FLOW_REDIRECT_URI).append(EQ).append(urlEncode(redirectUriParam));
codeFlowParams.append(AMP).append(OidcConstants.CODE_FLOW_STATE).append(EQ)
.append(generateCodeFlowState(context, configContext, redirectPath));
if (configContext.oidcConfig.authentication.getExtraParams() != null) {
for (Map.Entry<String, String> entry : configContext.oidcConfig.authentication.getExtraParams().entrySet()) {
codeFlowParams.append(AMP).append(entry.getKey()).append(EQ).append(urlEncode(entry.getValue()));
}
}
String authorizationURL = configContext.provider.getMetadata().getAuthorizationUri() + "?" + codeFlowParams.toString();
return Uni.createFrom().item(new ChallengeData(HttpResponseStatus.FOUND.code(), HttpHeaders.LOCATION,
authorizationURL));
}
private static String urlEncode(String value) {
try {
return URLEncoder.encode(value, StandardCharsets.UTF_8.name());
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
private Uni<SecurityIdentity> performCodeFlow(IdentityProviderManager identityProviderManager,
RoutingContext context, TenantConfigContext configContext, String code) {
Cookie stateCookie = context.getCookie(getStateCookieName(configContext));
String userPath = null;
String userQuery = null;
if (stateCookie != null) {
List<String> values = context.queryParam("state");
if (values.size() != 1) {
LOG.debug("State parameter can not be empty or multi-valued");
return Uni.createFrom().failure(new AuthenticationCompletionException());
} else if (!stateCookie.getValue().startsWith(values.get(0))) {
LOG.debug("State cookie value does not match the state query parameter value");
return Uni.createFrom().failure(new AuthenticationCompletionException());
} else {
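// The state cookie value is either the state UUID alone or the UUID followed by the
// original request path (and query) joined with COOKIE_DELIM, as written by
// generateCodeFlowState(); split it here to restore the user's original URL after login.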
String[] pair = COOKIE_PATTERN.split(stateCookie.getValue());
if (pair.length == 2) {
int userQueryIndex = pair[1].indexOf("?");
if (userQueryIndex >= 0) {
userPath = pair[1].substring(0, userQueryIndex);
if (userQueryIndex + 1 < pair[1].length()) {
userQuery = pair[1].substring(userQueryIndex + 1);
}
} else {
userPath = pair[1];
}
}
removeCookie(context, configContext, getStateCookieName(configContext));
}
} else {
LOG.debug("The state cookie is missing after a redirect from IDP, authentication has failed");
return Uni.createFrom().failure(new AuthenticationCompletionException());
}
final String finalUserPath = userPath;
final String finalUserQuery = userQuery;
Uni<AuthorizationCodeTokens> codeFlowTokensUni = getCodeFlowTokensUni(context, configContext, code);
return codeFlowTokensUni
.onItemOrFailure()
.transformToUni(new BiFunction<AuthorizationCodeTokens, Throwable, Uni<? extends SecurityIdentity>>() {
@Override
public Uni<SecurityIdentity> apply(final AuthorizationCodeTokens tokens, final Throwable tOuter) {
if (tOuter != null) {
LOG.debugf("Exception during the code to token exchange: %s", tOuter.getMessage());
return Uni.createFrom().failure(new AuthenticationCompletionException(tOuter));
}
context.put(NEW_AUTHENTICATION, Boolean.TRUE);
context.put(OidcConstants.ACCESS_TOKEN_VALUE, tokens.getAccessToken());
return authenticate(identityProviderManager, new IdTokenCredential(tokens.getIdToken(), context))
.map(new Function<SecurityIdentity, SecurityIdentity>() {
@Override
public SecurityIdentity apply(SecurityIdentity identity) {
processSuccessfulAuthentication(context, configContext,
tokens, identity);
boolean removeRedirectParams = configContext.oidcConfig.authentication
.isRemoveRedirectParameters();
if (removeRedirectParams || finalUserPath != null
|| finalUserQuery != null) {
URI absoluteUri = URI.create(context.request().absoluteURI());
StringBuilder finalUriWithoutQuery = new StringBuilder(buildUri(context,
isForceHttps(configContext),
absoluteUri.getAuthority(),
(finalUserPath != null ? finalUserPath
: absoluteUri.getRawPath())));
if (!removeRedirectParams) {
finalUriWithoutQuery.append('?').append(absoluteUri.getRawQuery());
}
if (finalUserQuery != null) {
finalUriWithoutQuery.append(!removeRedirectParams ? "" : "?");
finalUriWithoutQuery.append(finalUserQuery);
}
String finalRedirectUri = finalUriWithoutQuery.toString();
LOG.debugf("Final redirect URI: %s", finalRedirectUri);
throw new AuthenticationRedirectException(finalRedirectUri);
} else {
return augmentIdentity(identity, tokens.getAccessToken(),
tokens.getRefreshToken(), context);
}
}
}).onFailure().transform(new Function<Throwable, Throwable>() {
@Override
public Throwable apply(Throwable tInner) {
if (tInner instanceof AuthenticationRedirectException) {
return tInner;
}
return new AuthenticationCompletionException(tInner);
}
});
}
});
}
private void processSuccessfulAuthentication(RoutingContext context,
TenantConfigContext configContext,
AuthorizationCodeTokens tokens,
SecurityIdentity securityIdentity) {
removeCookie(context, configContext, getSessionCookieName(configContext.oidcConfig));
JsonObject idToken = OidcUtils.decodeJwtContent(tokens.getIdToken());
if (!idToken.containsKey("exp") || !idToken.containsKey("iat")) {
LOG.debug("ID Token is required to contain 'exp' and 'iat' claims");
throw new AuthenticationCompletionException();
}
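// Session cookie lifetime: the ID token lifetime (exp - iat), extended by the configured
// lifespan grace and, when expired-token refresh is enabled, by the session age extension.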
long maxAge = idToken.getLong("exp") - idToken.getLong("iat");
if (configContext.oidcConfig.token.lifespanGrace.isPresent()) {
maxAge += configContext.oidcConfig.token.lifespanGrace.getAsInt();
}
if (configContext.oidcConfig.token.refreshExpired) {
maxAge += configContext.oidcConfig.authentication.sessionAgeExtension.getSeconds();
}
context.put(SESSION_MAX_AGE_PARAM, maxAge);
String cookieValue = resolver.getTokenStateManager()
.createTokenState(context, configContext.oidcConfig, tokens);
createCookie(context, configContext.oidcConfig, getSessionCookieName(configContext.oidcConfig), cookieValue, maxAge);
fireEvent(SecurityEvent.Type.OIDC_LOGIN, securityIdentity);
}
private void fireEvent(SecurityEvent.Type eventType, SecurityIdentity securityIdentity) {
if (resolver.isSecurityEventObserved()) {
resolver.getSecurityEvent().fire(new SecurityEvent(eventType, securityIdentity));
}
}
private String getRedirectPath(TenantConfigContext configContext, RoutingContext context) {
Authentication auth = configContext.oidcConfig.getAuthentication();
return auth.getRedirectPath().isPresent() ? auth.getRedirectPath().get() : context.request().path();
}
private String generateCodeFlowState(RoutingContext context, TenantConfigContext configContext,
String redirectPath) {
String uuid = UUID.randomUUID().toString();
String cookieValue = uuid;
Authentication auth = configContext.oidcConfig.getAuthentication();
if (auth.isRestorePathAfterRedirect()) {
String requestQuery = context.request().query();
String requestPath = !redirectPath.equals(context.request().path()) || requestQuery != null
? context.request().path()
: "";
if (requestQuery != null) {
requestPath += ("?" + requestQuery);
}
if (!requestPath.isEmpty()) {
cookieValue += (COOKIE_DELIM + requestPath);
}
}
createCookie(context, configContext.oidcConfig, getStateCookieName(configContext), cookieValue, 60 * 30);
return uuid;
}
private String generatePostLogoutState(RoutingContext context, TenantConfigContext configContext) {
removeCookie(context, configContext, getPostLogoutCookieName(configContext));
return createCookie(context, configContext.oidcConfig, getPostLogoutCookieName(configContext),
UUID.randomUUID().toString(),
60 * 30).getValue();
}
static ServerCookie createCookie(RoutingContext context, OidcTenantConfig oidcConfig,
String name, String value, long maxAge) {
ServerCookie cookie = new CookieImpl(name, value);
cookie.setHttpOnly(true);
cookie.setSecure(oidcConfig.authentication.cookieForceSecure || context.request().isSSL());
cookie.setMaxAge(maxAge);
LOG.debugf(name + " cookie 'max-age' parameter is set to %d", maxAge);
Authentication auth = oidcConfig.getAuthentication();
setCookiePath(context, auth, cookie);
if (auth.cookieDomain.isPresent()) {
cookie.setDomain(auth.getCookieDomain().get());
}
context.response().addCookie(cookie);
return cookie;
}
static void setCookiePath(RoutingContext context, Authentication auth, ServerCookie cookie) {
if (auth.cookiePathHeader.isPresent() && context.request().headers().contains(auth.cookiePathHeader.get())) {
cookie.setPath(context.request().getHeader(auth.cookiePathHeader.get()));
} else if (auth.cookiePath.isPresent()) {
cookie.setPath(auth.getCookiePath().get());
}
}
private String buildUri(RoutingContext context, boolean forceHttps, String path) {
String authority = URI.create(context.request().absoluteURI()).getAuthority();
return buildUri(context, forceHttps, authority, path);
}
private String buildUri(RoutingContext context, boolean forceHttps, String authority, String path) {
final String scheme = forceHttps ? "https" : context.request().scheme();
String forwardedPrefix = "";
if (resolver.isEnableHttpForwardedPrefix()) {
String forwardedPrefixHeader = context.request().getHeader("X-Forwarded-Prefix");
if (forwardedPrefixHeader != null && !forwardedPrefixHeader.equals("/") && !forwardedPrefixHeader.equals("//")) {
forwardedPrefix = forwardedPrefixHeader;
if (forwardedPrefix.endsWith("/")) {
forwardedPrefix = forwardedPrefix.substring(0, forwardedPrefix.length() - 1);
}
}
}
return new StringBuilder(scheme).append("://")
.append(authority)
.append(forwardedPrefix)
.append(path)
.toString();
}
private void removeCookie(RoutingContext context, TenantConfigContext configContext, String cookieName) {
ServerCookie cookie = (ServerCookie) context.cookieMap().get(cookieName);
if (cookie != null) {
if (SESSION_COOKIE_NAME.equals(cookieName)) {
resolver.getTokenStateManager().deleteTokens(context, configContext.oidcConfig, cookie.getValue());
}
removeCookie(context, cookie, configContext.oidcConfig);
}
}
static void removeCookie(RoutingContext context, ServerCookie cookie, OidcTenantConfig oidcConfig) {
if (cookie != null) {
cookie.setValue("");
cookie.setMaxAge(0);
Authentication auth = oidcConfig.getAuthentication();
setCookiePath(context, auth, cookie);
if (auth.cookieDomain.isPresent()) {
cookie.setDomain(auth.cookieDomain.get());
}
}
}
private boolean isLogout(RoutingContext context, TenantConfigContext configContext) {
Optional<String> logoutPath = configContext.oidcConfig.logout.path;
if (logoutPath.isPresent()) {
return context.request().absoluteURI().equals(
buildUri(context, false, logoutPath.get()));
}
return false;
}
private Uni<SecurityIdentity> refreshSecurityIdentity(TenantConfigContext configContext, String refreshToken,
RoutingContext context, IdentityProviderManager identityProviderManager, boolean autoRefresh,
SecurityIdentity fallback) {
Uni<AuthorizationCodeTokens> refreshedTokensUni = refreshTokensUni(configContext, refreshToken);
return refreshedTokensUni
.onItemOrFailure()
.transformToUni(new BiFunction<AuthorizationCodeTokens, Throwable, Uni<? extends SecurityIdentity>>() {
@Override
public Uni<SecurityIdentity> apply(final AuthorizationCodeTokens tokens, final Throwable t) {
return Uni.createFrom().emitter(new Consumer<UniEmitter<? super SecurityIdentity>>() {
@Override
public void accept(UniEmitter<? super SecurityIdentity> emitter) {
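// When autoRefresh is true this refresh was proactive and the ID token is still valid, so a
// failed refresh falls back to the current identity; otherwise the session has expired and
// the failure is surfaced as an authentication failure.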
if (t != null) {
LOG.debugf("ID token refresh has failed: %s", t.getMessage());
if (autoRefresh) {
LOG.debug("Using the current SecurityIdentity since the ID token is still valid");
emitter.complete(((TokenAutoRefreshException) t).getSecurityIdentity());
} else {
emitter.fail(new AuthenticationFailedException(t));
}
} else {
context.put(OidcConstants.ACCESS_TOKEN_VALUE, tokens.getAccessToken());
context.put(REFRESH_TOKEN_GRANT_RESPONSE, Boolean.TRUE);
authenticate(identityProviderManager,
new IdTokenCredential(tokens.getIdToken(), context))
.subscribe().with(new Consumer<SecurityIdentity>() {
@Override
public void accept(SecurityIdentity identity) {
processSuccessfulAuthentication(context, configContext,
tokens, identity);
SecurityIdentity newSecurityIdentity = augmentIdentity(identity,
tokens.getAccessToken(), tokens.getRefreshToken(), context);
fireEvent(autoRefresh ? SecurityEvent.Type.OIDC_SESSION_REFRESHED
: SecurityEvent.Type.OIDC_SESSION_EXPIRED_AND_REFRESHED,
newSecurityIdentity);
emitter.complete(newSecurityIdentity);
}
}, new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) {
emitter.fail(new AuthenticationFailedException(throwable));
}
});
}
}
});
}
});
}
private Uni<AuthorizationCodeTokens> getCodeFlowTokensUni(RoutingContext context, TenantConfigContext configContext,
String code) {
String redirectPath = getRedirectPath(configContext, context);
String redirectUriParam = buildUri(context, isForceHttps(configContext), redirectPath);
LOG.debugf("Token request redirect_uri parameter: %s", redirectUriParam);
return configContext.provider.getCodeFlowTokens(code, redirectUriParam).plug(u -> {
if (!BlockingOperationControl.isBlockingAllowed()) {
return u.runSubscriptionOn(resolver.getBlockingExecutor());
}
return u;
});
}
private String buildLogoutRedirectUri(TenantConfigContext configContext, String idToken, RoutingContext context) {
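// RP-initiated logout: send the user to the provider's end_session endpoint with an
// id_token_hint and, when a post-logout path is configured, a post_logout_redirect_uri
// plus a state value stored in a cookie for the return from the provider.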
String logoutPath = configContext.provider.getMetadata().getEndSessionUri();
StringBuilder logoutUri = new StringBuilder(logoutPath).append("?").append("id_token_hint=").append(idToken);
if (configContext.oidcConfig.logout.postLogoutPath.isPresent()) {
logoutUri.append("&post_logout_redirect_uri=").append(
buildUri(context, isForceHttps(configContext), configContext.oidcConfig.logout.postLogoutPath.get()));
logoutUri.append("&state=").append(generatePostLogoutState(context, configContext));
}
return logoutUri.toString();
}
private boolean isForceHttps(TenantConfigContext configContext) {
return configContext.oidcConfig.authentication.forceRedirectHttpsScheme;
}
private AuthenticationRedirectException redirectToLogoutEndpoint(RoutingContext context, TenantConfigContext configContext,
String idToken) {
removeCookie(context, configContext, getSessionCookieName(configContext.oidcConfig));
return new AuthenticationRedirectException(buildLogoutRedirectUri(configContext, idToken, context));
}
private static String getStateCookieName(TenantConfigContext configContext) {
String cookieSuffix = getCookieSuffix(configContext.oidcConfig.tenantId.get());
return STATE_COOKIE_NAME + cookieSuffix;
}
private static String getPostLogoutCookieName(TenantConfigContext configContext) {
String cookieSuffix = getCookieSuffix(configContext.oidcConfig.tenantId.get());
return POST_LOGOUT_COOKIE_NAME + cookieSuffix;
}
private static String getSessionCookieName(OidcTenantConfig oidcConfig) {
String cookieSuffix = getCookieSuffix(oidcConfig.tenantId.get());
return SESSION_COOKIE_NAME + cookieSuffix;
}
static String getCookieSuffix(String tenantId) {
return !"Default".equals(tenantId) ? "_" + tenantId : "";
}
} |
Shall we add an API to the RenameContext to get the honorChangeAnnotation flag, because otherwise we have to pass the flag as function args. | public CompletableFuture<WorkspaceEdit> rename(RenameParams params) {
return CompletableFuture.supplyAsync(() -> {
try {
RenameContext context = ContextBuilder.buildRenameContext(params.getTextDocument().getUri(),
this.workspaceManager,
this.serverContext,
params.getPosition());
boolean honorsChangeAnnotations = this.clientCapabilities.getTextDocCapabilities()
.getRename().getHonorsChangeAnnotations() != null ?
this.clientCapabilities.getTextDocCapabilities().getRename().getHonorsChangeAnnotations()
: false;
return RenameUtil.rename(context, params.getNewName(), honorsChangeAnnotations);
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Rename", e);
} catch (Throwable e) {
String msg = "Operation 'text/rename' failed!";
this.clientLogger.logError(LSContextOperation.TXT_RENAME, msg, e, params.getTextDocument(),
params.getPosition());
}
return new WorkspaceEdit();
});
} | return RenameUtil.rename(context, params.getNewName(), honorsChangeAnnotations); | public CompletableFuture<WorkspaceEdit> rename(RenameParams params) {
return CompletableFuture.supplyAsync(() -> {
try {
RenameContext context = ContextBuilder.buildRenameContext(params,
this.workspaceManager,
this.serverContext,
this.clientCapabilities);
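// Hypothetical sketch (not the actual API) of the reviewer's suggestion above: RenameContext
// could expose the client capability directly, e.g.
//     boolean honorsChangeAnnotations = context.getHonorsChangeAnnotations();
// so that RenameUtil.rename(context) can read the flag itself instead of taking it as an
// extra argument.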
return RenameUtil.rename(context);
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Rename", e);
} catch (Throwable e) {
String msg = "Operation 'text/rename' failed!";
this.clientLogger.logError(LSContextOperation.TXT_RENAME, msg, e, params.getTextDocument(),
params.getPosition());
}
return null;
});
} | class BallerinaTextDocumentService implements TextDocumentService {
private final BallerinaLanguageServer languageServer;
private LSClientCapabilities clientCapabilities;
private final WorkspaceManager workspaceManager;
private final LanguageServerContext serverContext;
private final LSClientLogger clientLogger;
BallerinaTextDocumentService(BallerinaLanguageServer languageServer,
WorkspaceManager workspaceManager,
LanguageServerContext serverContext) {
this.workspaceManager = workspaceManager;
this.languageServer = languageServer;
this.serverContext = serverContext;
this.clientLogger = LSClientLogger.getInstance(this.serverContext);
}
/**
* Set the client capabilities.
*
* @param clientCapabilities Client's Text Document Capabilities
*/
void setClientCapabilities(LSClientCapabilities clientCapabilities) {
this.clientCapabilities = clientCapabilities;
}
@Override
public CompletableFuture<Either<List<CompletionItem>, CompletionList>> completion(CompletionParams position) {
return CompletableFuture.supplyAsync(() -> {
String fileUri = position.getTextDocument().getUri();
CompletionContext context = ContextBuilder.buildCompletionContext(fileUri,
this.workspaceManager,
this.clientCapabilities.getTextDocCapabilities().getCompletion(),
this.serverContext,
position.getPosition());
try {
return LangExtensionDelegator.instance().completion(position, context, this.serverContext);
} catch (Throwable e) {
String msg = "Operation 'text/completion' failed!";
this.clientLogger.logError(LSContextOperation.TXT_COMPLETION, msg, e, position.getTextDocument(),
position.getPosition());
}
return Either.forLeft(Collections.emptyList());
});
}
@Override
public CompletableFuture<Hover> hover(HoverParams params) {
return CompletableFuture.supplyAsync(() -> {
String fileUri = params.getTextDocument().getUri();
HoverContext context = ContextBuilder
.buildHoverContext(fileUri, this.workspaceManager, this.serverContext, params.getPosition());
Hover hover;
try {
hover = HoverUtil.getHover(context);
} catch (Throwable e) {
String msg = "Operation 'text/hover' failed!";
this.clientLogger.logError(LSContextOperation.TXT_HOVER, msg, e, params.getTextDocument(),
params.getPosition());
hover = HoverUtil.getDefaultHoverObject();
}
return hover;
});
}
@Override
public CompletableFuture<SignatureHelp> signatureHelp(SignatureHelpParams params) {
return CompletableFuture.supplyAsync(() -> {
String uri = params.getTextDocument().getUri();
Optional<Path> sigFilePath = CommonUtil.getPathFromURI(uri);
if (sigFilePath.isEmpty()) {
return new SignatureHelp();
}
SignatureContext context = ContextBuilder.buildSignatureContext(uri,
this.workspaceManager,
this.clientCapabilities.getTextDocCapabilities().getSignatureHelp(),
this.serverContext,
params.getPosition());
try {
return SignatureHelpUtil.getSignatureHelp(context);
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Signature Help", e);
return new SignatureHelp();
} catch (Throwable e) {
String msg = "Operation 'text/signature' failed!";
this.clientLogger.logError(LSContextOperation.TXT_SIGNATURE, msg, e, params.getTextDocument(),
params.getPosition());
return new SignatureHelp();
}
});
}
@Override
public CompletableFuture<Either<List<? extends Location>, List<? extends LocationLink>>> definition
(DefinitionParams params) {
return CompletableFuture.supplyAsync(() -> {
try {
BallerinaDefinitionContext defContext = ContextBuilder.buildDefinitionContext(
params.getTextDocument().getUri(),
this.workspaceManager,
this.serverContext,
params.getPosition());
return Either.forLeft(DefinitionUtil.getDefinition(defContext, params.getPosition()));
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Goto Definition", e);
return Either.forLeft(Collections.emptyList());
} catch (Throwable e) {
String msg = "Operation 'text/definition' failed!";
this.clientLogger.logError(LSContextOperation.TXT_DEFINITION, msg, e, params.getTextDocument(),
params.getPosition());
return Either.forLeft(Collections.emptyList());
}
});
}
@Override
public CompletableFuture<List<? extends Location>> references(ReferenceParams params) {
return CompletableFuture.supplyAsync(() -> {
try {
ReferencesContext context = ContextBuilder.buildReferencesContext(params.getTextDocument().getUri(),
this.workspaceManager,
this.serverContext,
params.getPosition());
Map<Module, List<io.ballerina.tools.diagnostics.Location>> referencesMap =
ReferencesUtil.getReferences(context);
List<Location> references = new ArrayList<>();
referencesMap.forEach((module, locations) ->
locations.forEach(location -> {
String uri = ReferencesUtil.getUriFromLocation(module, location);
references.add(new Location(uri, ReferencesUtil.getRange(location)));
}));
return references;
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Find References", e);
return new ArrayList<>();
} catch (Throwable e) {
String msg = "Operation 'text/references' failed!";
this.clientLogger.logError(LSContextOperation.TXT_REFERENCES, msg, e, params.getTextDocument(),
params.getPosition());
return new ArrayList<>();
}
});
}
@Override
public CompletableFuture<List<Either<SymbolInformation, DocumentSymbol>>>
documentSymbol(DocumentSymbolParams params) {
return CompletableFuture.supplyAsync(() -> {
String fileUri = params.getTextDocument().getUri();
Optional<Path> docSymbolFilePath = CommonUtil.getPathFromURI(fileUri);
if (docSymbolFilePath.isEmpty()) {
return new ArrayList<>();
}
try {
return new ArrayList<>();
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Document Symbols", e);
return new ArrayList<>();
} catch (Throwable e) {
String msg = "Operation 'text/documentSymbol' failed!";
this.clientLogger.logError(LSContextOperation.TXT_DOC_SYMBOL, msg, e, params.getTextDocument(),
(Position) null);
return new ArrayList<>();
}
});
}
@Override
public CompletableFuture<List<Either<Command, CodeAction>>> codeAction(CodeActionParams params) {
return CompletableFuture.supplyAsync(() -> {
String fileUri = params.getTextDocument().getUri();
try {
CodeActionContext context = ContextBuilder.buildCodeActionContext(fileUri, workspaceManager,
this.serverContext, params);
return LangExtensionDelegator.instance().codeActions(params, context, this.serverContext).stream()
.map((Function<CodeAction, Either<Command, CodeAction>>) Either::forRight)
.collect(Collectors.toList());
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Code Action", e);
} catch (Throwable e) {
String msg = "Operation 'text/codeAction' failed!";
Range range = params.getRange();
this.clientLogger.logError(LSContextOperation.TXT_CODE_ACTION, msg, e, params.getTextDocument(),
range.getStart(), range.getEnd());
}
return Collections.emptyList();
});
}
@Override
public CompletableFuture<List<? extends CodeLens>> codeLens(CodeLensParams params) {
return CompletableFuture.supplyAsync(() -> {
List<CodeLens> lenses;
if (!LSCodeLensesProviderHolder.getInstance(this.serverContext).isEnabled()) {
clientCapabilities.getTextDocCapabilities().setCodeLens(null);
return new ArrayList<>();
}
String fileUri = params.getTextDocument().getUri();
Optional<Path> docSymbolFilePath = CommonUtil.getPathFromURI(fileUri);
if (docSymbolFilePath.isEmpty()) {
return new ArrayList<>();
}
DocumentServiceContext codeLensContext = ContextBuilder.buildBaseContext(fileUri,
this.workspaceManager,
LSContextOperation.TXT_CODE_LENS, this.serverContext);
try {
lenses = CodeLensUtil.getCodeLenses(codeLensContext, params.getTextDocument());
return lenses;
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Code Lens", e);
} catch (Throwable e) {
String msg = "Operation 'text/codeLens' failed!";
this.clientLogger.logError(LSContextOperation.TXT_CODE_LENS, msg, e, params.getTextDocument(),
(Position) null);
}
return Collections.emptyList();
});
}
@Override
public CompletableFuture<CodeLens> resolveCodeLens(CodeLens unresolved) {
return null;
}
@Override
public CompletableFuture<List<? extends TextEdit>> formatting(DocumentFormattingParams params) {
return CompletableFuture.supplyAsync(() -> {
TextEdit textEdit = new TextEdit();
String fileUri = params.getTextDocument().getUri();
Optional<Path> formattingFilePath = CommonUtil.getPathFromURI(fileUri);
if (formattingFilePath.isEmpty()) {
return Collections.singletonList(textEdit);
}
try {
CommonUtil.getPathFromURI(fileUri);
Optional<Document> document = workspaceManager.document(formattingFilePath.get());
if (document.isEmpty()) {
return new ArrayList<>();
}
SyntaxTree syntaxTree = document.get().syntaxTree();
String formattedSource = Formatter.format(syntaxTree).toSourceCode();
LinePosition eofPos = syntaxTree.rootNode().lineRange().endLine();
Range range = new Range(new Position(0, 0), new Position(eofPos.line() + 1, eofPos.offset()));
textEdit = new TextEdit(range, formattedSource);
return Collections.singletonList(textEdit);
} catch (UserErrorException | FormatterException e) {
this.clientLogger.notifyUser("Formatting", e);
return Collections.singletonList(textEdit);
} catch (Throwable e) {
String msg = "Operation 'text/formatting' failed!";
this.clientLogger.logError(LSContextOperation.TXT_FORMATTING, msg, e, params.getTextDocument(),
(Position) null);
return Collections.singletonList(textEdit);
}
});
}
/**
* The document range formatting request is sent from the client to the
* server to format a given range in a document.
* <p>
* Registration Options: TextDocumentRegistrationOptions
*/
@Override
public CompletableFuture<List<? extends TextEdit>> rangeFormatting(DocumentRangeFormattingParams params) {
return CompletableFuture.supplyAsync(() -> {
TextEdit textEdit = new TextEdit();
String fileUri = params.getTextDocument().getUri();
Optional<Path> formattingFilePath = CommonUtil.getPathFromURI(fileUri);
if (formattingFilePath.isEmpty()) {
return Collections.singletonList(textEdit);
}
try {
CommonUtil.getPathFromURI(fileUri);
Optional<Document> document = workspaceManager.document(formattingFilePath.get());
if (document.isEmpty()) {
return new ArrayList<>();
}
SyntaxTree syntaxTree = document.get().syntaxTree();
Range range = params.getRange();
LinePosition startPos = LinePosition.from(range.getStart().getLine(), range.getStart().getCharacter());
LinePosition endPos = LinePosition.from(range.getEnd().getLine(), range.getEnd().getCharacter());
LineRange lineRange = LineRange.from(syntaxTree.filePath(), startPos, endPos);
SyntaxTree formattedTree = Formatter.format(syntaxTree, lineRange);
LinePosition eofPos = syntaxTree.rootNode().lineRange().endLine();
Range updateRange = new Range(new Position(0, 0), new Position(eofPos.line() + 1, eofPos.offset()));
textEdit = new TextEdit(updateRange, formattedTree.toSourceCode());
return Collections.singletonList(textEdit);
} catch (UserErrorException | FormatterException e) {
this.clientLogger.notifyUser("Formatting", e);
return Collections.singletonList(textEdit);
} catch (Throwable e) {
String msg = "Operation 'text/rangeFormatting' failed!";
this.clientLogger.logError(LSContextOperation.TXT_RANGE_FORMATTING, msg, e, params.getTextDocument(),
(Position) null);
return Collections.singletonList(textEdit);
}
});
}
@Override
public CompletableFuture<Either<Range, PrepareRenameResult>> prepareRename(PrepareRenameParams params) {
return CompletableFuture.supplyAsync(() -> {
try {
PrepareRenameContext context = ContextBuilder.buildPrepareRenameContext(
params.getTextDocument().getUri(),
this.workspaceManager,
this.serverContext,
params.getPosition());
Optional<Range> range = RenameUtil.prepareRename(context);
if (range.isPresent()) {
return Either.forLeft(range.get());
}
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Rename", e);
} catch (Throwable t) {
String msg = "Operation 'text/prepareRename' failed!";
this.clientLogger.logError(LSContextOperation.TXT_PREPARE_RENAME, msg, t, params.getTextDocument(),
params.getPosition());
}
return null;
});
}
@Override
public void didOpen(DidOpenTextDocumentParams params) {
String fileUri = params.getTextDocument().getUri();
try {
DocumentServiceContext context = ContextBuilder.buildBaseContext(fileUri, this.workspaceManager,
LSContextOperation.TXT_DID_OPEN, this.serverContext);
this.workspaceManager.didOpen(context.filePath(), params);
this.clientLogger.logTrace("Operation '" + LSContextOperation.TXT_DID_OPEN.getName() +
"' {fileUri: '" + fileUri + "'} opened");
DiagnosticsHelper diagnosticsHelper = DiagnosticsHelper.getInstance(this.serverContext);
diagnosticsHelper.compileAndSendDiagnostics(this.languageServer.getClient(), context);
} catch (Throwable e) {
String msg = "Operation 'text/didOpen' failed!";
TextDocumentIdentifier identifier = new TextDocumentIdentifier(params.getTextDocument().getUri());
this.clientLogger.logError(LSContextOperation.TXT_DID_OPEN, msg, e, identifier, (Position) null);
}
}
@Override
public void didChange(DidChangeTextDocumentParams params) {
String fileUri = params.getTextDocument().getUri();
try {
DocumentServiceContext context = ContextBuilder.buildBaseContext(fileUri,
this.workspaceManager,
LSContextOperation.TXT_DID_CHANGE,
this.serverContext);
workspaceManager.didChange(context.filePath(), params);
this.clientLogger.logTrace("Operation '" + LSContextOperation.TXT_DID_CHANGE.getName() +
"' {fileUri: '" + fileUri + "'} updated");
DiagnosticsHelper diagnosticsHelper = DiagnosticsHelper.getInstance(this.serverContext);
diagnosticsHelper.compileAndSendDiagnostics(this.languageServer.getClient(), context);
} catch (Throwable e) {
String msg = "Operation 'text/didChange' failed!";
this.clientLogger.logError(LSContextOperation.TXT_DID_CHANGE, msg, e, params.getTextDocument(),
(Position) null);
}
}
@Override
public void didClose(DidCloseTextDocumentParams params) {
String fileUri = params.getTextDocument().getUri();
try {
DocumentServiceContext context = ContextBuilder.buildBaseContext(fileUri,
this.workspaceManager,
LSContextOperation.TXT_DID_CLOSE,
this.serverContext);
workspaceManager.didClose(context.filePath(), params);
this.clientLogger.logTrace("Operation '" + LSContextOperation.TXT_DID_CLOSE.getName() +
"' {fileUri: '" + fileUri + "'} closed");
} catch (Throwable e) {
String msg = "Operation 'text/didClose' failed!";
this.clientLogger.logError(LSContextOperation.TXT_DID_CLOSE, msg, e, params.getTextDocument(),
(Position) null);
}
}
@Override
public void didSave(DidSaveTextDocumentParams params) {
}
@JsonRequest
public CompletableFuture<List<FoldingRange>> foldingRange(FoldingRangeRequestParams params) {
return CompletableFuture.supplyAsync(() -> {
try {
FoldingRangeContext foldingRangeContext = ContextBuilder.buildFoldingRangeContext(
params.getTextDocument().getUri(),
this.workspaceManager,
this.serverContext,
this.clientCapabilities.getTextDocCapabilities().getFoldingRange().getLineFoldingOnly());
return FoldingRangeProvider.getFoldingRange(foldingRangeContext);
} catch (Throwable e) {
String msg = "Operation 'text/foldingRange' failed!";
this.clientLogger.logError(LSContextOperation.TXT_FOLDING_RANGE, msg, e,
new TextDocumentIdentifier(params.getTextDocument().getUri()),
(Position) null);
return Collections.emptyList();
}
});
}
} | class BallerinaTextDocumentService implements TextDocumentService {
private final BallerinaLanguageServer languageServer;
private LSClientCapabilities clientCapabilities;
private final WorkspaceManager workspaceManager;
private final LanguageServerContext serverContext;
private final LSClientLogger clientLogger;
BallerinaTextDocumentService(BallerinaLanguageServer languageServer,
WorkspaceManager workspaceManager,
LanguageServerContext serverContext) {
this.workspaceManager = workspaceManager;
this.languageServer = languageServer;
this.serverContext = serverContext;
this.clientLogger = LSClientLogger.getInstance(this.serverContext);
}
/**
* Set the client capabilities.
*
* @param clientCapabilities Client's Text Document Capabilities
*/
void setClientCapabilities(LSClientCapabilities clientCapabilities) {
this.clientCapabilities = clientCapabilities;
}
@Override
public CompletableFuture<Either<List<CompletionItem>, CompletionList>> completion(CompletionParams position) {
return CompletableFuture.supplyAsync(() -> {
String fileUri = position.getTextDocument().getUri();
CompletionContext context = ContextBuilder.buildCompletionContext(fileUri,
this.workspaceManager,
this.clientCapabilities.getTextDocCapabilities().getCompletion(),
this.serverContext,
position.getPosition());
try {
return LangExtensionDelegator.instance().completion(position, context, this.serverContext);
} catch (Throwable e) {
String msg = "Operation 'text/completion' failed!";
this.clientLogger.logError(LSContextOperation.TXT_COMPLETION, msg, e, position.getTextDocument(),
position.getPosition());
}
return Either.forLeft(Collections.emptyList());
});
}
@Override
public CompletableFuture<Hover> hover(HoverParams params) {
return CompletableFuture.supplyAsync(() -> {
String fileUri = params.getTextDocument().getUri();
HoverContext context = ContextBuilder
.buildHoverContext(fileUri, this.workspaceManager, this.serverContext, params.getPosition());
Hover hover;
try {
hover = HoverUtil.getHover(context);
} catch (Throwable e) {
String msg = "Operation 'text/hover' failed!";
this.clientLogger.logError(LSContextOperation.TXT_HOVER, msg, e, params.getTextDocument(),
params.getPosition());
hover = HoverUtil.getDefaultHoverObject();
}
return hover;
});
}
@Override
public CompletableFuture<SignatureHelp> signatureHelp(SignatureHelpParams params) {
return CompletableFuture.supplyAsync(() -> {
String uri = params.getTextDocument().getUri();
Optional<Path> sigFilePath = CommonUtil.getPathFromURI(uri);
if (sigFilePath.isEmpty()) {
return new SignatureHelp();
}
SignatureContext context = ContextBuilder.buildSignatureContext(uri,
this.workspaceManager,
this.clientCapabilities.getTextDocCapabilities().getSignatureHelp(),
this.serverContext,
params.getPosition());
try {
return SignatureHelpUtil.getSignatureHelp(context);
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Signature Help", e);
return new SignatureHelp();
} catch (Throwable e) {
String msg = "Operation 'text/signature' failed!";
this.clientLogger.logError(LSContextOperation.TXT_SIGNATURE, msg, e, params.getTextDocument(),
params.getPosition());
return new SignatureHelp();
}
});
}
@Override
public CompletableFuture<Either<List<? extends Location>, List<? extends LocationLink>>> definition
(DefinitionParams params) {
return CompletableFuture.supplyAsync(() -> {
try {
BallerinaDefinitionContext defContext = ContextBuilder.buildDefinitionContext(
params.getTextDocument().getUri(),
this.workspaceManager,
this.serverContext,
params.getPosition());
return Either.forLeft(DefinitionUtil.getDefinition(defContext, params.getPosition()));
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Goto Definition", e);
return Either.forLeft(Collections.emptyList());
} catch (Throwable e) {
String msg = "Operation 'text/definition' failed!";
this.clientLogger.logError(LSContextOperation.TXT_DEFINITION, msg, e, params.getTextDocument(),
params.getPosition());
return Either.forLeft(Collections.emptyList());
}
});
}
@Override
public CompletableFuture<List<? extends Location>> references(ReferenceParams params) {
return CompletableFuture.supplyAsync(() -> {
try {
ReferencesContext context = ContextBuilder.buildReferencesContext(params.getTextDocument().getUri(),
this.workspaceManager,
this.serverContext,
params.getPosition());
Map<Module, List<io.ballerina.tools.diagnostics.Location>> referencesMap =
ReferencesUtil.getReferences(context);
List<Location> references = new ArrayList<>();
referencesMap.forEach((module, locations) ->
locations.forEach(location -> {
String uri = ReferencesUtil.getUriFromLocation(module, location);
references.add(new Location(uri, ReferencesUtil.getRange(location)));
}));
return references;
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Find References", e);
return new ArrayList<>();
} catch (Throwable e) {
String msg = "Operation 'text/references' failed!";
this.clientLogger.logError(LSContextOperation.TXT_REFERENCES, msg, e, params.getTextDocument(),
params.getPosition());
return new ArrayList<>();
}
});
}
@Override
public CompletableFuture<List<Either<SymbolInformation, DocumentSymbol>>>
documentSymbol(DocumentSymbolParams params) {
return CompletableFuture.supplyAsync(() -> {
String fileUri = params.getTextDocument().getUri();
Optional<Path> docSymbolFilePath = CommonUtil.getPathFromURI(fileUri);
if (docSymbolFilePath.isEmpty()) {
return new ArrayList<>();
}
try {
return new ArrayList<>();
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Document Symbols", e);
return new ArrayList<>();
} catch (Throwable e) {
String msg = "Operation 'text/documentSymbol' failed!";
this.clientLogger.logError(LSContextOperation.TXT_DOC_SYMBOL, msg, e, params.getTextDocument(),
(Position) null);
return new ArrayList<>();
}
});
}
@Override
public CompletableFuture<List<Either<Command, CodeAction>>> codeAction(CodeActionParams params) {
return CompletableFuture.supplyAsync(() -> {
String fileUri = params.getTextDocument().getUri();
try {
CodeActionContext context = ContextBuilder.buildCodeActionContext(fileUri, workspaceManager,
this.serverContext, params);
return LangExtensionDelegator.instance().codeActions(params, context, this.serverContext).stream()
.map((Function<CodeAction, Either<Command, CodeAction>>) Either::forRight)
.collect(Collectors.toList());
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Code Action", e);
} catch (Throwable e) {
String msg = "Operation 'text/codeAction' failed!";
Range range = params.getRange();
this.clientLogger.logError(LSContextOperation.TXT_CODE_ACTION, msg, e, params.getTextDocument(),
range.getStart(), range.getEnd());
}
return Collections.emptyList();
});
}
@Override
public CompletableFuture<List<? extends CodeLens>> codeLens(CodeLensParams params) {
return CompletableFuture.supplyAsync(() -> {
List<CodeLens> lenses;
if (!LSCodeLensesProviderHolder.getInstance(this.serverContext).isEnabled()) {
clientCapabilities.getTextDocCapabilities().setCodeLens(null);
return new ArrayList<>();
}
String fileUri = params.getTextDocument().getUri();
Optional<Path> docSymbolFilePath = CommonUtil.getPathFromURI(fileUri);
if (docSymbolFilePath.isEmpty()) {
return new ArrayList<>();
}
DocumentServiceContext codeLensContext = ContextBuilder.buildBaseContext(fileUri,
this.workspaceManager,
LSContextOperation.TXT_CODE_LENS, this.serverContext);
try {
lenses = CodeLensUtil.getCodeLenses(codeLensContext, params.getTextDocument());
return lenses;
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Code Lens", e);
} catch (Throwable e) {
String msg = "Operation 'text/codeLens' failed!";
this.clientLogger.logError(LSContextOperation.TXT_CODE_LENS, msg, e, params.getTextDocument(),
(Position) null);
}
return Collections.emptyList();
});
}
@Override
public CompletableFuture<CodeLens> resolveCodeLens(CodeLens unresolved) {
return null;
}
@Override
public CompletableFuture<List<? extends TextEdit>> formatting(DocumentFormattingParams params) {
return CompletableFuture.supplyAsync(() -> {
TextEdit textEdit = new TextEdit();
String fileUri = params.getTextDocument().getUri();
Optional<Path> formattingFilePath = CommonUtil.getPathFromURI(fileUri);
if (formattingFilePath.isEmpty()) {
return Collections.singletonList(textEdit);
}
try {
CommonUtil.getPathFromURI(fileUri);
Optional<Document> document = workspaceManager.document(formattingFilePath.get());
if (document.isEmpty()) {
return new ArrayList<>();
}
SyntaxTree syntaxTree = document.get().syntaxTree();
String formattedSource = Formatter.format(syntaxTree).toSourceCode();
LinePosition eofPos = syntaxTree.rootNode().lineRange().endLine();
Range range = new Range(new Position(0, 0), new Position(eofPos.line() + 1, eofPos.offset()));
textEdit = new TextEdit(range, formattedSource);
return Collections.singletonList(textEdit);
} catch (UserErrorException | FormatterException e) {
this.clientLogger.notifyUser("Formatting", e);
return Collections.singletonList(textEdit);
} catch (Throwable e) {
String msg = "Operation 'text/formatting' failed!";
this.clientLogger.logError(LSContextOperation.TXT_FORMATTING, msg, e, params.getTextDocument(),
(Position) null);
return Collections.singletonList(textEdit);
}
});
}
/**
* The document range formatting request is sent from the client to the
* server to format a given range in a document.
* <p>
* Registration Options: TextDocumentRegistrationOptions
*/
@Override
public CompletableFuture<List<? extends TextEdit>> rangeFormatting(DocumentRangeFormattingParams params) {
return CompletableFuture.supplyAsync(() -> {
TextEdit textEdit = new TextEdit();
String fileUri = params.getTextDocument().getUri();
Optional<Path> formattingFilePath = CommonUtil.getPathFromURI(fileUri);
if (formattingFilePath.isEmpty()) {
return Collections.singletonList(textEdit);
}
try {
CommonUtil.getPathFromURI(fileUri);
Optional<Document> document = workspaceManager.document(formattingFilePath.get());
if (document.isEmpty()) {
return new ArrayList<>();
}
SyntaxTree syntaxTree = document.get().syntaxTree();
Range range = params.getRange();
LinePosition startPos = LinePosition.from(range.getStart().getLine(), range.getStart().getCharacter());
LinePosition endPos = LinePosition.from(range.getEnd().getLine(), range.getEnd().getCharacter());
LineRange lineRange = LineRange.from(syntaxTree.filePath(), startPos, endPos);
SyntaxTree formattedTree = Formatter.format(syntaxTree, lineRange);
LinePosition eofPos = syntaxTree.rootNode().lineRange().endLine();
Range updateRange = new Range(new Position(0, 0), new Position(eofPos.line() + 1, eofPos.offset()));
textEdit = new TextEdit(updateRange, formattedTree.toSourceCode());
return Collections.singletonList(textEdit);
} catch (UserErrorException | FormatterException e) {
this.clientLogger.notifyUser("Formatting", e);
return Collections.singletonList(textEdit);
} catch (Throwable e) {
String msg = "Operation 'text/rangeFormatting' failed!";
this.clientLogger.logError(LSContextOperation.TXT_RANGE_FORMATTING, msg, e, params.getTextDocument(),
(Position) null);
return Collections.singletonList(textEdit);
}
});
}
@Override
public CompletableFuture<Either<Range, PrepareRenameResult>> prepareRename(PrepareRenameParams params) {
return CompletableFuture.supplyAsync(() -> {
try {
PrepareRenameContext context = ContextBuilder.buildPrepareRenameContext(
params.getTextDocument().getUri(),
this.workspaceManager,
this.serverContext,
params.getPosition());
Optional<Range> range = RenameUtil.prepareRename(context);
if (range.isPresent()) {
return Either.forLeft(range.get());
}
} catch (UserErrorException e) {
this.clientLogger.notifyUser("Rename", e);
} catch (Throwable t) {
String msg = "Operation 'text/prepareRename' failed!";
this.clientLogger.logError(LSContextOperation.TXT_PREPARE_RENAME, msg, t, params.getTextDocument(),
params.getPosition());
}
return null;
});
}
@Override
public void didOpen(DidOpenTextDocumentParams params) {
String fileUri = params.getTextDocument().getUri();
try {
DocumentServiceContext context = ContextBuilder.buildBaseContext(fileUri, this.workspaceManager,
LSContextOperation.TXT_DID_OPEN, this.serverContext);
this.workspaceManager.didOpen(context.filePath(), params);
this.clientLogger.logTrace("Operation '" + LSContextOperation.TXT_DID_OPEN.getName() +
"' {fileUri: '" + fileUri + "'} opened");
DiagnosticsHelper diagnosticsHelper = DiagnosticsHelper.getInstance(this.serverContext);
diagnosticsHelper.compileAndSendDiagnostics(this.languageServer.getClient(), context);
} catch (Throwable e) {
String msg = "Operation 'text/didOpen' failed!";
TextDocumentIdentifier identifier = new TextDocumentIdentifier(params.getTextDocument().getUri());
this.clientLogger.logError(LSContextOperation.TXT_DID_OPEN, msg, e, identifier, (Position) null);
}
}
@Override
public void didChange(DidChangeTextDocumentParams params) {
String fileUri = params.getTextDocument().getUri();
try {
DocumentServiceContext context = ContextBuilder.buildBaseContext(fileUri,
this.workspaceManager,
LSContextOperation.TXT_DID_CHANGE,
this.serverContext);
workspaceManager.didChange(context.filePath(), params);
this.clientLogger.logTrace("Operation '" + LSContextOperation.TXT_DID_CHANGE.getName() +
"' {fileUri: '" + fileUri + "'} updated");
DiagnosticsHelper diagnosticsHelper = DiagnosticsHelper.getInstance(this.serverContext);
diagnosticsHelper.compileAndSendDiagnostics(this.languageServer.getClient(), context);
} catch (Throwable e) {
String msg = "Operation 'text/didChange' failed!";
this.clientLogger.logError(LSContextOperation.TXT_DID_CHANGE, msg, e, params.getTextDocument(),
(Position) null);
}
}
@Override
public void didClose(DidCloseTextDocumentParams params) {
String fileUri = params.getTextDocument().getUri();
try {
DocumentServiceContext context = ContextBuilder.buildBaseContext(fileUri,
this.workspaceManager,
LSContextOperation.TXT_DID_CLOSE,
this.serverContext);
workspaceManager.didClose(context.filePath(), params);
this.clientLogger.logTrace("Operation '" + LSContextOperation.TXT_DID_CLOSE.getName() +
"' {fileUri: '" + fileUri + "'} closed");
} catch (Throwable e) {
String msg = "Operation 'text/didClose' failed!";
this.clientLogger.logError(LSContextOperation.TXT_DID_CLOSE, msg, e, params.getTextDocument(),
(Position) null);
}
}
@Override
public void didSave(DidSaveTextDocumentParams params) {
}
@JsonRequest
public CompletableFuture<List<FoldingRange>> foldingRange(FoldingRangeRequestParams params) {
return CompletableFuture.supplyAsync(() -> {
try {
FoldingRangeContext foldingRangeContext = ContextBuilder.buildFoldingRangeContext(
params.getTextDocument().getUri(),
this.workspaceManager,
this.serverContext,
this.clientCapabilities.getTextDocCapabilities().getFoldingRange().getLineFoldingOnly());
return FoldingRangeProvider.getFoldingRange(foldingRangeContext);
} catch (Throwable e) {
String msg = "Operation 'text/foldingRange' failed!";
this.clientLogger.logError(LSContextOperation.TXT_FOLDING_RANGE, msg, e,
new TextDocumentIdentifier(params.getTextDocument().getUri()),
(Position) null);
return Collections.emptyList();
}
});
}
} |
can not bind before write edit log. or clean up when failed. | public Table createTable(LocalMetastore metastore, Database db, CreateTableStmt stmt) throws DdlException {
GlobalStateMgr stateMgr = metastore.getStateMgr();
ColocateTableIndex colocateTableIndex = metastore.getColocateTableIndex();
String tableName = stmt.getTableName();
LOG.debug("begin create olap table: {}", tableName);
List<Column> baseSchema = stmt.getColumns();
metastore.validateColumns(baseSchema);
PartitionDesc partitionDesc = stmt.getPartitionDesc();
PartitionInfo partitionInfo;
Map<String, Long> partitionNameToId = Maps.newHashMap();
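// Pre-allocate a partition id for every declared partition (range, list, or expression
// based); an unpartitioned table gets a single implicit partition named after the table.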
if (partitionDesc != null) {
if (partitionDesc instanceof RangePartitionDesc) {
RangePartitionDesc rangePartitionDesc = (RangePartitionDesc) partitionDesc;
for (SingleRangePartitionDesc desc : rangePartitionDesc.getSingleRangePartitionDescs()) {
long partitionId = metastore.getNextId();
partitionNameToId.put(desc.getPartitionName(), partitionId);
}
} else if (partitionDesc instanceof ListPartitionDesc) {
ListPartitionDesc listPartitionDesc = (ListPartitionDesc) partitionDesc;
listPartitionDesc.findAllPartitionNames()
.forEach(partitionName -> partitionNameToId.put(partitionName, metastore.getNextId()));
} else if (partitionDesc instanceof ExpressionPartitionDesc) {
ExpressionPartitionDesc expressionPartitionDesc = (ExpressionPartitionDesc) partitionDesc;
for (SingleRangePartitionDesc desc : expressionPartitionDesc.getRangePartitionDesc()
.getSingleRangePartitionDescs()) {
long partitionId = metastore.getNextId();
partitionNameToId.put(desc.getPartitionName(), partitionId);
}
DynamicPartitionUtil.checkIfAutomaticPartitionAllowed(stmt.getProperties());
} else {
throw new DdlException("Currently only support range or list partition with engine type olap");
}
partitionInfo = partitionDesc.toPartitionInfo(baseSchema, partitionNameToId, false);
if (partitionInfo.isAutomaticPartition()) {
long partitionId = metastore.getNextId();
String replicateNum = String.valueOf(RunMode.defaultReplicationNum());
if (stmt.getProperties() != null) {
replicateNum = stmt.getProperties().getOrDefault("replication_num",
String.valueOf(RunMode.defaultReplicationNum()));
}
partitionInfo.createAutomaticShadowPartition(partitionId, replicateNum);
partitionNameToId.put(ExpressionRangePartitionInfo.AUTOMATIC_SHADOW_PARTITION_NAME, partitionId);
}
} else {
if (DynamicPartitionUtil.checkDynamicPartitionPropertiesExist(stmt.getProperties())) {
throw new DdlException("Only support dynamic partition properties on range partition table");
}
long partitionId = metastore.getNextId();
partitionNameToId.put(tableName, partitionId);
partitionInfo = new SinglePartitionInfo();
}
KeysDesc keysDesc = stmt.getKeysDesc();
Preconditions.checkNotNull(keysDesc);
KeysType keysType = keysDesc.getKeysType();
DistributionDesc distributionDesc = stmt.getDistributionDesc();
Preconditions.checkNotNull(distributionDesc);
DistributionInfo distributionInfo = distributionDesc.toDistributionInfo(baseSchema);
short shortKeyColumnCount = 0;
List<Integer> sortKeyIdxes = new ArrayList<>();
if (stmt.getSortKeys() != null) {
List<String> baseSchemaNames = baseSchema.stream().map(Column::getName).collect(Collectors.toList());
for (String column : stmt.getSortKeys()) {
int idx = baseSchemaNames.indexOf(column);
if (idx == -1) {
throw new DdlException("Invalid column '" + column + "': not exists in all columns.");
}
sortKeyIdxes.add(idx);
}
shortKeyColumnCount =
GlobalStateMgr.calcShortKeyColumnCount(baseSchema, stmt.getProperties(), sortKeyIdxes);
} else {
shortKeyColumnCount = GlobalStateMgr.calcShortKeyColumnCount(baseSchema, stmt.getProperties());
}
LOG.debug("create table[{}] short key column count: {}", tableName, shortKeyColumnCount);
TableIndexes indexes = new TableIndexes(stmt.getIndexes());
Map<String, String> properties = stmt.getProperties();
long tableId = GlobalStateMgr.getCurrentState().getNextId();
OlapTable table;
String storageVolumeId = "";
if (stmt.isExternal()) {
table = new ExternalOlapTable(db.getId(), tableId, tableName, baseSchema, keysType, partitionInfo,
distributionInfo, indexes, properties);
if (GlobalStateMgr.getCurrentState().getNodeMgr()
.checkFeExistByRPCPort(((ExternalOlapTable) table).getSourceTableHost(),
((ExternalOlapTable) table).getSourceTablePort())) {
throw new DdlException("can not create OLAP external table of self cluster");
}
} else if (stmt.isOlapEngine()) {
RunMode runMode = RunMode.getCurrentRunMode();
String volume = "";
if (properties != null && properties.containsKey(PropertyAnalyzer.PROPERTIES_STORAGE_VOLUME)) {
volume = properties.remove(PropertyAnalyzer.PROPERTIES_STORAGE_VOLUME);
}
if (runMode == RunMode.SHARED_DATA) {
if (volume.equals(StorageVolumeMgr.LOCAL)) {
throw new DdlException("Cannot create table " +
"without persistent volume in current run mode \"" + runMode + "\"");
}
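// Resolve the storage volume for a shared-data table: an empty name falls back to the
// database's bound volume (or the builtin volume), "default" maps to the system default
// volume, and any other value is looked up by name; unknown volumes are rejected.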
StorageVolumeMgr svm = GlobalStateMgr.getCurrentState().getStorageVolumeMgr();
StorageVolume sv = null;
if (volume.isEmpty()) {
String dbStorageVolumeId = svm.getStorageVolumeIdOfDb(db.getId());
if (dbStorageVolumeId != null) {
sv = svm.getStorageVolume(dbStorageVolumeId);
} else {
sv = svm.getStorageVolumeByName(SharedDataStorageVolumeMgr.BUILTIN_STORAGE_VOLUME);
}
} else if (volume.equals(StorageVolumeMgr.DEFAULT)) {
sv = svm.getDefaultStorageVolume();
} else {
sv = svm.getStorageVolumeByName(volume);
}
if (sv == null) {
throw new DdlException("Unknown storage volume \"" + volume + "\"");
}
table = new LakeTable(tableId, tableName, baseSchema, keysType, partitionInfo, distributionInfo, indexes);
storageVolumeId = sv.getId();
metastore.setLakeStorageInfo(table, storageVolumeId, properties);
table.setStorageVolume(sv.getName());
} else {
table = new OlapTable(tableId, tableName, baseSchema, keysType, partitionInfo, distributionInfo, indexes);
table.setStorageVolume(StorageVolumeMgr.LOCAL);
}
} else {
throw new DdlException("Unrecognized engine \"" + stmt.getEngineName() + "\"");
}
table.setComment(stmt.getComment());
long baseIndexId = metastore.getNextId();
table.setBaseIndexId(baseIndexId);
Set<String> bfColumns = null;
double bfFpp = 0;
try {
bfColumns = PropertyAnalyzer.analyzeBloomFilterColumns(properties, baseSchema,
table.getKeysType() == KeysType.PRIMARY_KEYS);
if (bfColumns != null && bfColumns.isEmpty()) {
bfColumns = null;
}
bfFpp = PropertyAnalyzer.analyzeBloomFilterFpp(properties);
if (bfColumns != null && bfFpp == 0) {
bfFpp = FeConstants.DEFAULT_BLOOM_FILTER_FPP;
} else if (bfColumns == null) {
bfFpp = 0;
}
table.setBloomFilterInfo(bfColumns, bfFpp);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
short replicationNum = RunMode.defaultReplicationNum();
try {
boolean isReplicationNumSet =
properties != null && properties.containsKey(PropertyAnalyzer.PROPERTIES_REPLICATION_NUM);
replicationNum = PropertyAnalyzer.analyzeReplicationNum(properties, replicationNum);
if (isReplicationNumSet) {
table.setReplicationNum(replicationNum);
}
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
boolean isInMemory =
PropertyAnalyzer.analyzeBooleanProp(properties, PropertyAnalyzer.PROPERTIES_INMEMORY, false);
table.setIsInMemory(isInMemory);
boolean enablePersistentIndex =
PropertyAnalyzer.analyzeBooleanProp(properties, PropertyAnalyzer.PROPERTIES_ENABLE_PERSISTENT_INDEX,
false);
if (enablePersistentIndex && table.isCloudNativeTable()) {
throw new DdlException("Cannot create cloud native table with persistent index yet");
}
table.setEnablePersistentIndex(enablePersistentIndex);
if (properties != null && (properties.containsKey(PropertyAnalyzer.PROPERTIES_BINLOG_ENABLE) ||
properties.containsKey(PropertyAnalyzer.PROPERTIES_BINLOG_MAX_SIZE) ||
properties.containsKey(PropertyAnalyzer.PROPERTIES_BINLOG_TTL))) {
try {
boolean enableBinlog = PropertyAnalyzer.analyzeBooleanProp(properties,
PropertyAnalyzer.PROPERTIES_BINLOG_ENABLE, false);
long binlogTtl = PropertyAnalyzer.analyzeLongProp(properties,
PropertyAnalyzer.PROPERTIES_BINLOG_TTL, Config.binlog_ttl_second);
long binlogMaxSize = PropertyAnalyzer.analyzeLongProp(properties,
PropertyAnalyzer.PROPERTIES_BINLOG_MAX_SIZE, Config.binlog_max_size);
BinlogConfig binlogConfig = new BinlogConfig(0, enableBinlog,
binlogTtl, binlogMaxSize);
table.setCurBinlogConfig(binlogConfig);
LOG.info("create table {} set binlog config, enable_binlog = {}, binlogTtl = {}, binlog_max_size = {}",
tableName, enableBinlog, binlogTtl, binlogMaxSize);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
}
try {
table.setWriteQuorum(PropertyAnalyzer.analyzeWriteQuorum(properties));
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
table.setEnableReplicatedStorage(
PropertyAnalyzer.analyzeBooleanProp(
properties, PropertyAnalyzer.PROPERTIES_REPLICATED_STORAGE,
Config.enable_replicated_storage_as_default_engine));
if (table.enableReplicatedStorage().equals(false)) {
for (Column col : baseSchema) {
if (col.isAutoIncrement()) {
throw new DdlException("Table with AUTO_INCREMENT column must use Replicated Storage");
}
}
}
TTabletType tabletType = TTabletType.TABLET_TYPE_DISK;
try {
tabletType = PropertyAnalyzer.analyzeTabletType(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
if (table.isCloudNativeTable() && properties != null) {
try {
PeriodDuration duration = PropertyAnalyzer.analyzeDataCachePartitionDuration(properties);
if (duration != null) {
table.setDataCachePartitionDuration(duration);
}
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
}
if (partitionInfo.getType() == PartitionType.UNPARTITIONED) {
long partitionId = partitionNameToId.get(tableName);
DataProperty dataProperty = null;
try {
boolean hasMedium = false;
if (properties != null) {
hasMedium = properties.containsKey(PropertyAnalyzer.PROPERTIES_STORAGE_MEDIUM);
}
dataProperty = PropertyAnalyzer.analyzeDataProperty(properties,
DataProperty.getInferredDefaultDataProperty(), false);
if (hasMedium) {
table.setStorageMedium(dataProperty.getStorageMedium());
}
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(dataProperty);
partitionInfo.setDataProperty(partitionId, dataProperty);
partitionInfo.setReplicationNum(partitionId, replicationNum);
partitionInfo.setIsInMemory(partitionId, isInMemory);
partitionInfo.setTabletType(partitionId, tabletType);
StorageInfo storageInfo = table.getTableProperty().getStorageInfo();
DataCacheInfo dataCacheInfo = storageInfo == null ? null : storageInfo.getDataCacheInfo();
partitionInfo.setDataCacheInfo(partitionId, dataCacheInfo);
}
String colocateGroup = PropertyAnalyzer.analyzeColocate(properties);
boolean addedToColocateGroup = colocateTableIndex.addTableToGroup(db, table,
colocateGroup, false /* expectLakeTable */);
if (!(table instanceof ExternalOlapTable) && addedToColocateGroup) {
DistributionInfo defaultDistributionInfo = table.getDefaultDistributionInfo();
if (defaultDistributionInfo.getBucketNum() == 0) {
int bucketNum = CatalogUtils.calBucketNumAccordingToBackends();
defaultDistributionInfo.setBucketNum(bucketNum);
}
}
TStorageType baseIndexStorageType = null;
try {
baseIndexStorageType = PropertyAnalyzer.analyzeStorageType(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(baseIndexStorageType);
int schemaVersion = 0;
try {
schemaVersion = PropertyAnalyzer.analyzeSchemaVersion(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
int schemaHash = Util.schemaHash(schemaVersion, baseSchema, bfColumns, bfFpp);
if (stmt.getSortKeys() != null) {
table.setIndexMeta(baseIndexId, tableName, baseSchema, schemaVersion, schemaHash,
shortKeyColumnCount, baseIndexStorageType, keysType, null, sortKeyIdxes);
} else {
table.setIndexMeta(baseIndexId, tableName, baseSchema, schemaVersion, schemaHash,
shortKeyColumnCount, baseIndexStorageType, keysType, null);
}
for (AlterClause alterClause : stmt.getRollupAlterClauseList()) {
AddRollupClause addRollupClause = (AddRollupClause) alterClause;
Long baseRollupIndex = table.getIndexIdByName(tableName);
TStorageType rollupIndexStorageType = null;
try {
rollupIndexStorageType = PropertyAnalyzer.analyzeStorageType(addRollupClause.getProperties());
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(rollupIndexStorageType);
List<Column> rollupColumns = stateMgr.getRollupHandler().checkAndPrepareMaterializedView(addRollupClause,
table, baseRollupIndex);
short rollupShortKeyColumnCount =
GlobalStateMgr.calcShortKeyColumnCount(rollupColumns, alterClause.getProperties());
int rollupSchemaHash = Util.schemaHash(schemaVersion, rollupColumns, bfColumns, bfFpp);
long rollupIndexId = metastore.getNextId();
table.setIndexMeta(rollupIndexId, addRollupClause.getRollupName(), rollupColumns, schemaVersion,
rollupSchemaHash, rollupShortKeyColumnCount, rollupIndexStorageType, keysType);
}
Long version = null;
try {
version = PropertyAnalyzer.analyzeVersionInfo(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(version);
if (properties != null) {
properties.remove("storage_format");
}
TCompressionType compressionType = TCompressionType.LZ4_FRAME;
try {
compressionType = PropertyAnalyzer.analyzeCompressionType(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
table.setCompressionType(compressionType);
int partitionLiveNumber;
if (properties != null && properties.containsKey(PropertyAnalyzer.PROPERTIES_PARTITION_LIVE_NUMBER)) {
try {
partitionLiveNumber = PropertyAnalyzer.analyzePartitionLiveNumber(properties, true);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
table.setPartitionLiveNumber(partitionLiveNumber);
}
try {
processConstraint(db, table, properties);
} catch (AnalysisException e) {
throw new DdlException(
String.format("processing constraint failed when creating table:%s. exception msg:%s",
table.getName(), e.getMessage()), e);
}
Set<Long> tabletIdSet = new HashSet<Long>();
if (table.isOlapOrCloudNativeTable()) {
if (partitionInfo.getType() == PartitionType.UNPARTITIONED) {
if (properties != null && !properties.isEmpty()) {
throw new DdlException("Unknown properties: " + properties);
}
long partitionId = partitionNameToId.get(tableName);
Partition partition = metastore.createPartition(db, table, partitionId, tableName, version, tabletIdSet);
metastore.buildPartitions(db, table, Collections.singletonList(partition));
table.addPartition(partition);
} else if (partitionInfo.isRangePartition() || partitionInfo.getType() == PartitionType.LIST) {
try {
boolean hasMedium = false;
if (properties != null) {
hasMedium = properties.containsKey(PropertyAnalyzer.PROPERTIES_STORAGE_MEDIUM);
}
DataProperty dataProperty = PropertyAnalyzer.analyzeDataProperty(properties,
DataProperty.getInferredDefaultDataProperty(), false);
DynamicPartitionUtil.checkAndSetDynamicPartitionProperty(table, properties);
if (table.dynamicPartitionExists() && table.getColocateGroup() != null) {
HashDistributionInfo info = (HashDistributionInfo) distributionInfo;
if (info.getBucketNum() !=
table.getTableProperty().getDynamicPartitionProperty().getBuckets()) {
throw new DdlException("dynamic_partition.buckets should equal the distribution buckets"
+ " if creating a colocate table");
}
}
if (hasMedium) {
table.setStorageMedium(dataProperty.getStorageMedium());
}
if (properties != null && !properties.isEmpty()) {
throw new DdlException("Unknown properties: " + properties);
}
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
List<Partition> partitions = new ArrayList<>(partitionNameToId.size());
for (Map.Entry<String, Long> entry : partitionNameToId.entrySet()) {
Partition partition = metastore.createPartition(db, table, entry.getValue(), entry.getKey(), version,
tabletIdSet);
partitions.add(partition);
}
metastore.buildPartitions(db, table, partitions);
for (Partition partition : partitions) {
table.addPartition(partition);
}
} else {
throw new DdlException("Unsupported partition method: " + partitionInfo.getType().name());
}
if (table.isBinlogEnabled()) {
Map<String, String> binlogAvailableVersion = table.buildBinlogAvailableVersion();
table.setBinlogAvailableVersion(binlogAvailableVersion);
LOG.info("set binlog available version when create table, tableName : {}, partitions : {}",
tableName, binlogAvailableVersion.toString());
}
}
colocateTableIndex.addTableToGroup(db, table, colocateGroup, true /* expectLakeTable */);
if (RunMode.getCurrentRunMode() == RunMode.SHARED_DATA && !storageVolumeId.isEmpty()) {
StorageVolumeMgr svm = GlobalStateMgr.getCurrentState().getStorageVolumeMgr();
svm.bindTableToStorageVolume(storageVolumeId, tableId);
}
LOG.info("Successfully create table[{};{}]", tableName, tableId);
return table;
} | svm.bindTableToStorageVolume(storageVolumeId, tableId); | public Table createTable(LocalMetastore metastore, Database db, CreateTableStmt stmt) throws DdlException {
GlobalStateMgr stateMgr = metastore.getStateMgr();
ColocateTableIndex colocateTableIndex = metastore.getColocateTableIndex();
String tableName = stmt.getTableName();
LOG.debug("begin create olap table: {}", tableName);
List<Column> baseSchema = stmt.getColumns();
metastore.validateColumns(baseSchema);
PartitionDesc partitionDesc = stmt.getPartitionDesc();
PartitionInfo partitionInfo;
Map<String, Long> partitionNameToId = Maps.newHashMap();
if (partitionDesc != null) {
if (partitionDesc instanceof RangePartitionDesc) {
RangePartitionDesc rangePartitionDesc = (RangePartitionDesc) partitionDesc;
for (SingleRangePartitionDesc desc : rangePartitionDesc.getSingleRangePartitionDescs()) {
long partitionId = metastore.getNextId();
partitionNameToId.put(desc.getPartitionName(), partitionId);
}
} else if (partitionDesc instanceof ListPartitionDesc) {
ListPartitionDesc listPartitionDesc = (ListPartitionDesc) partitionDesc;
listPartitionDesc.findAllPartitionNames()
.forEach(partitionName -> partitionNameToId.put(partitionName, metastore.getNextId()));
} else if (partitionDesc instanceof ExpressionPartitionDesc) {
ExpressionPartitionDesc expressionPartitionDesc = (ExpressionPartitionDesc) partitionDesc;
for (SingleRangePartitionDesc desc : expressionPartitionDesc.getRangePartitionDesc()
.getSingleRangePartitionDescs()) {
long partitionId = metastore.getNextId();
partitionNameToId.put(desc.getPartitionName(), partitionId);
}
DynamicPartitionUtil.checkIfAutomaticPartitionAllowed(stmt.getProperties());
} else {
throw new DdlException("Currently only support range or list partition with engine type olap");
}
partitionInfo = partitionDesc.toPartitionInfo(baseSchema, partitionNameToId, false);
if (partitionInfo.isAutomaticPartition()) {
long partitionId = metastore.getNextId();
String replicateNum = String.valueOf(RunMode.defaultReplicationNum());
if (stmt.getProperties() != null) {
replicateNum = stmt.getProperties().getOrDefault("replication_num",
String.valueOf(RunMode.defaultReplicationNum()));
}
partitionInfo.createAutomaticShadowPartition(partitionId, replicateNum);
partitionNameToId.put(ExpressionRangePartitionInfo.AUTOMATIC_SHADOW_PARTITION_NAME, partitionId);
}
} else {
if (DynamicPartitionUtil.checkDynamicPartitionPropertiesExist(stmt.getProperties())) {
throw new DdlException("Only support dynamic partition properties on range partition table");
}
long partitionId = metastore.getNextId();
partitionNameToId.put(tableName, partitionId);
partitionInfo = new SinglePartitionInfo();
}
KeysDesc keysDesc = stmt.getKeysDesc();
Preconditions.checkNotNull(keysDesc);
KeysType keysType = keysDesc.getKeysType();
DistributionDesc distributionDesc = stmt.getDistributionDesc();
Preconditions.checkNotNull(distributionDesc);
DistributionInfo distributionInfo = distributionDesc.toDistributionInfo(baseSchema);
short shortKeyColumnCount = 0;
List<Integer> sortKeyIdxes = new ArrayList<>();
if (stmt.getSortKeys() != null) {
List<String> baseSchemaNames = baseSchema.stream().map(Column::getName).collect(Collectors.toList());
for (String column : stmt.getSortKeys()) {
int idx = baseSchemaNames.indexOf(column);
if (idx == -1) {
throw new DdlException("Invalid column '" + column + "': not exists in all columns.");
}
sortKeyIdxes.add(idx);
}
shortKeyColumnCount =
GlobalStateMgr.calcShortKeyColumnCount(baseSchema, stmt.getProperties(), sortKeyIdxes);
} else {
shortKeyColumnCount = GlobalStateMgr.calcShortKeyColumnCount(baseSchema, stmt.getProperties());
}
LOG.debug("create table[{}] short key column count: {}", tableName, shortKeyColumnCount);
TableIndexes indexes = new TableIndexes(stmt.getIndexes());
Map<String, String> properties = stmt.getProperties();
long tableId = GlobalStateMgr.getCurrentState().getNextId();
OlapTable table;
if (stmt.isExternal()) {
table = new ExternalOlapTable(db.getId(), tableId, tableName, baseSchema, keysType, partitionInfo,
distributionInfo, indexes, properties);
if (GlobalStateMgr.getCurrentState().getNodeMgr()
.checkFeExistByRPCPort(((ExternalOlapTable) table).getSourceTableHost(),
((ExternalOlapTable) table).getSourceTablePort())) {
throw new DdlException("can not create OLAP external table of self cluster");
}
} else if (stmt.isOlapEngine()) {
RunMode runMode = RunMode.getCurrentRunMode();
String volume = "";
if (properties != null && properties.containsKey(PropertyAnalyzer.PROPERTIES_STORAGE_VOLUME)) {
volume = properties.remove(PropertyAnalyzer.PROPERTIES_STORAGE_VOLUME);
}
if (runMode == RunMode.SHARED_DATA) {
if (volume.equals(StorageVolumeMgr.LOCAL)) {
throw new DdlException("Cannot create table " +
"without persistent volume in current run mode \"" + runMode + "\"");
}
StorageVolumeMgr svm = GlobalStateMgr.getCurrentState().getStorageVolumeMgr();
StorageVolume sv = null;
if (volume.isEmpty()) {
String dbStorageVolumeId = svm.getStorageVolumeIdOfDb(db.getId());
if (dbStorageVolumeId != null) {
sv = svm.getStorageVolume(dbStorageVolumeId);
} else {
sv = svm.getStorageVolumeByName(SharedDataStorageVolumeMgr.BUILTIN_STORAGE_VOLUME);
}
} else if (volume.equals(StorageVolumeMgr.DEFAULT)) {
sv = svm.getDefaultStorageVolume();
} else {
sv = svm.getStorageVolumeByName(volume);
}
if (sv == null) {
throw new DdlException("Unknown storage volume \"" + volume + "\"");
}
table = new LakeTable(tableId, tableName, baseSchema, keysType, partitionInfo, distributionInfo, indexes);
String storageVolumeId = sv.getId();
metastore.setLakeStorageInfo(table, storageVolumeId, properties);
if (!svm.bindTableToStorageVolume(sv.getId(), table.getId())) {
throw new DdlException(String.format("Storage volume with id %s not exists", storageVolumeId));
}
table.setStorageVolume(sv.getName());
} else {
table = new OlapTable(tableId, tableName, baseSchema, keysType, partitionInfo, distributionInfo, indexes);
table.setStorageVolume(StorageVolumeMgr.LOCAL);
}
} else {
throw new DdlException("Unrecognized engine \"" + stmt.getEngineName() + "\"");
}
try {
table.setComment(stmt.getComment());
long baseIndexId = metastore.getNextId();
table.setBaseIndexId(baseIndexId);
Set<String> bfColumns = null;
double bfFpp = 0;
try {
bfColumns = PropertyAnalyzer.analyzeBloomFilterColumns(properties, baseSchema,
table.getKeysType() == KeysType.PRIMARY_KEYS);
if (bfColumns != null && bfColumns.isEmpty()) {
bfColumns = null;
}
bfFpp = PropertyAnalyzer.analyzeBloomFilterFpp(properties);
if (bfColumns != null && bfFpp == 0) {
bfFpp = FeConstants.DEFAULT_BLOOM_FILTER_FPP;
} else if (bfColumns == null) {
bfFpp = 0;
}
table.setBloomFilterInfo(bfColumns, bfFpp);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
short replicationNum = RunMode.defaultReplicationNum();
try {
boolean isReplicationNumSet =
properties != null && properties.containsKey(PropertyAnalyzer.PROPERTIES_REPLICATION_NUM);
replicationNum = PropertyAnalyzer.analyzeReplicationNum(properties, replicationNum);
if (isReplicationNumSet) {
table.setReplicationNum(replicationNum);
}
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
boolean isInMemory =
PropertyAnalyzer.analyzeBooleanProp(properties, PropertyAnalyzer.PROPERTIES_INMEMORY, false);
table.setIsInMemory(isInMemory);
boolean enablePersistentIndex =
PropertyAnalyzer.analyzeBooleanProp(properties, PropertyAnalyzer.PROPERTIES_ENABLE_PERSISTENT_INDEX,
false);
if (enablePersistentIndex && table.isCloudNativeTable()) {
throw new DdlException("Cannot create cloud native table with persistent index yet");
}
table.setEnablePersistentIndex(enablePersistentIndex);
if (properties != null && (properties.containsKey(PropertyAnalyzer.PROPERTIES_BINLOG_ENABLE) ||
properties.containsKey(PropertyAnalyzer.PROPERTIES_BINLOG_MAX_SIZE) ||
properties.containsKey(PropertyAnalyzer.PROPERTIES_BINLOG_TTL))) {
try {
boolean enableBinlog = PropertyAnalyzer.analyzeBooleanProp(properties,
PropertyAnalyzer.PROPERTIES_BINLOG_ENABLE, false);
long binlogTtl = PropertyAnalyzer.analyzeLongProp(properties,
PropertyAnalyzer.PROPERTIES_BINLOG_TTL, Config.binlog_ttl_second);
long binlogMaxSize = PropertyAnalyzer.analyzeLongProp(properties,
PropertyAnalyzer.PROPERTIES_BINLOG_MAX_SIZE, Config.binlog_max_size);
BinlogConfig binlogConfig = new BinlogConfig(0, enableBinlog,
binlogTtl, binlogMaxSize);
table.setCurBinlogConfig(binlogConfig);
LOG.info("create table {} set binlog config, enable_binlog = {}, binlogTtl = {}, binlog_max_size = {}",
tableName, enableBinlog, binlogTtl, binlogMaxSize);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
}
try {
table.setWriteQuorum(PropertyAnalyzer.analyzeWriteQuorum(properties));
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
table.setEnableReplicatedStorage(
PropertyAnalyzer.analyzeBooleanProp(
properties, PropertyAnalyzer.PROPERTIES_REPLICATED_STORAGE,
Config.enable_replicated_storage_as_default_engine));
if (table.enableReplicatedStorage().equals(false)) {
for (Column col : baseSchema) {
if (col.isAutoIncrement()) {
throw new DdlException("Table with AUTO_INCREMENT column must use Replicated Storage");
}
}
}
TTabletType tabletType = TTabletType.TABLET_TYPE_DISK;
try {
tabletType = PropertyAnalyzer.analyzeTabletType(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
if (table.isCloudNativeTable() && properties != null) {
try {
PeriodDuration duration = PropertyAnalyzer.analyzeDataCachePartitionDuration(properties);
if (duration != null) {
table.setDataCachePartitionDuration(duration);
}
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
}
if (partitionInfo.getType() == PartitionType.UNPARTITIONED) {
long partitionId = partitionNameToId.get(tableName);
DataProperty dataProperty = null;
try {
boolean hasMedium = false;
if (properties != null) {
hasMedium = properties.containsKey(PropertyAnalyzer.PROPERTIES_STORAGE_MEDIUM);
}
dataProperty = PropertyAnalyzer.analyzeDataProperty(properties,
DataProperty.getInferredDefaultDataProperty(), false);
if (hasMedium) {
table.setStorageMedium(dataProperty.getStorageMedium());
}
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(dataProperty);
partitionInfo.setDataProperty(partitionId, dataProperty);
partitionInfo.setReplicationNum(partitionId, replicationNum);
partitionInfo.setIsInMemory(partitionId, isInMemory);
partitionInfo.setTabletType(partitionId, tabletType);
StorageInfo storageInfo = table.getTableProperty().getStorageInfo();
DataCacheInfo dataCacheInfo = storageInfo == null ? null : storageInfo.getDataCacheInfo();
partitionInfo.setDataCacheInfo(partitionId, dataCacheInfo);
}
String colocateGroup = PropertyAnalyzer.analyzeColocate(properties);
boolean addedToColocateGroup = colocateTableIndex.addTableToGroup(db, table,
colocateGroup, false /* expectLakeTable */);
if (!(table instanceof ExternalOlapTable) && addedToColocateGroup) {
DistributionInfo defaultDistributionInfo = table.getDefaultDistributionInfo();
if (defaultDistributionInfo.getBucketNum() == 0) {
int bucketNum = CatalogUtils.calBucketNumAccordingToBackends();
defaultDistributionInfo.setBucketNum(bucketNum);
}
}
TStorageType baseIndexStorageType = null;
try {
baseIndexStorageType = PropertyAnalyzer.analyzeStorageType(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(baseIndexStorageType);
int schemaVersion = 0;
try {
schemaVersion = PropertyAnalyzer.analyzeSchemaVersion(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
int schemaHash = Util.schemaHash(schemaVersion, baseSchema, bfColumns, bfFpp);
if (stmt.getSortKeys() != null) {
table.setIndexMeta(baseIndexId, tableName, baseSchema, schemaVersion, schemaHash,
shortKeyColumnCount, baseIndexStorageType, keysType, null, sortKeyIdxes);
} else {
table.setIndexMeta(baseIndexId, tableName, baseSchema, schemaVersion, schemaHash,
shortKeyColumnCount, baseIndexStorageType, keysType, null);
}
for (AlterClause alterClause : stmt.getRollupAlterClauseList()) {
AddRollupClause addRollupClause = (AddRollupClause) alterClause;
Long baseRollupIndex = table.getIndexIdByName(tableName);
TStorageType rollupIndexStorageType = null;
try {
rollupIndexStorageType = PropertyAnalyzer.analyzeStorageType(addRollupClause.getProperties());
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(rollupIndexStorageType);
List<Column> rollupColumns = stateMgr.getRollupHandler().checkAndPrepareMaterializedView(addRollupClause,
table, baseRollupIndex);
short rollupShortKeyColumnCount =
GlobalStateMgr.calcShortKeyColumnCount(rollupColumns, alterClause.getProperties());
int rollupSchemaHash = Util.schemaHash(schemaVersion, rollupColumns, bfColumns, bfFpp);
long rollupIndexId = metastore.getNextId();
table.setIndexMeta(rollupIndexId, addRollupClause.getRollupName(), rollupColumns, schemaVersion,
rollupSchemaHash, rollupShortKeyColumnCount, rollupIndexStorageType, keysType);
}
Long version = null;
try {
version = PropertyAnalyzer.analyzeVersionInfo(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
Preconditions.checkNotNull(version);
if (properties != null) {
properties.remove("storage_format");
}
TCompressionType compressionType = TCompressionType.LZ4_FRAME;
try {
compressionType = PropertyAnalyzer.analyzeCompressionType(properties);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
table.setCompressionType(compressionType);
int partitionLiveNumber;
if (properties != null && properties.containsKey(PropertyAnalyzer.PROPERTIES_PARTITION_LIVE_NUMBER)) {
try {
partitionLiveNumber = PropertyAnalyzer.analyzePartitionLiveNumber(properties, true);
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
table.setPartitionLiveNumber(partitionLiveNumber);
}
try {
processConstraint(db, table, properties);
} catch (AnalysisException e) {
throw new DdlException(
String.format("processing constraint failed when creating table:%s. exception msg:%s",
table.getName(), e.getMessage()), e);
}
Set<Long> tabletIdSet = new HashSet<Long>();
if (table.isOlapOrCloudNativeTable()) {
if (partitionInfo.getType() == PartitionType.UNPARTITIONED) {
if (properties != null && !properties.isEmpty()) {
throw new DdlException("Unknown properties: " + properties);
}
long partitionId = partitionNameToId.get(tableName);
Partition partition = metastore.createPartition(db, table, partitionId, tableName, version, tabletIdSet);
metastore.buildPartitions(db, table, Collections.singletonList(partition));
table.addPartition(partition);
} else if (partitionInfo.isRangePartition() || partitionInfo.getType() == PartitionType.LIST) {
try {
boolean hasMedium = false;
if (properties != null) {
hasMedium = properties.containsKey(PropertyAnalyzer.PROPERTIES_STORAGE_MEDIUM);
}
DataProperty dataProperty = PropertyAnalyzer.analyzeDataProperty(properties,
DataProperty.getInferredDefaultDataProperty(), false);
DynamicPartitionUtil.checkAndSetDynamicPartitionProperty(table, properties);
if (table.dynamicPartitionExists() && table.getColocateGroup() != null) {
HashDistributionInfo info = (HashDistributionInfo) distributionInfo;
if (info.getBucketNum() !=
table.getTableProperty().getDynamicPartitionProperty().getBuckets()) {
throw new DdlException("dynamic_partition.buckets should equal the distribution buckets"
+ " if creating a colocate table");
}
}
if (hasMedium) {
table.setStorageMedium(dataProperty.getStorageMedium());
}
if (properties != null && !properties.isEmpty()) {
throw new DdlException("Unknown properties: " + properties);
}
} catch (AnalysisException e) {
throw new DdlException(e.getMessage());
}
List<Partition> partitions = new ArrayList<>(partitionNameToId.size());
for (Map.Entry<String, Long> entry : partitionNameToId.entrySet()) {
Partition partition = metastore.createPartition(db, table, entry.getValue(), entry.getKey(), version,
tabletIdSet);
partitions.add(partition);
}
metastore.buildPartitions(db, table, partitions);
for (Partition partition : partitions) {
table.addPartition(partition);
}
} else {
throw new DdlException("Unsupported partition method: " + partitionInfo.getType().name());
}
if (table.isBinlogEnabled()) {
Map<String, String> binlogAvailableVersion = table.buildBinlogAvailableVersion();
table.setBinlogAvailableVersion(binlogAvailableVersion);
LOG.info("set binlog available version when create table, tableName : {}, partitions : {}",
tableName, binlogAvailableVersion.toString());
}
}
colocateTableIndex.addTableToGroup(db, table, colocateGroup, true /* expectLakeTable */);
} catch (DdlException e) {
if (RunMode.getCurrentRunMode() == RunMode.SHARED_DATA) {
GlobalStateMgr.getCurrentState().getStorageVolumeMgr().unbindTableToStorageVolume(tableId);
}
throw e;
}
LOG.info("Successfully create table[{};{}]", tableName, tableId);
return table;
} | class OlapTableFactory implements AbstractTableFactory {
private static final Logger LOG = LogManager.getLogger(OlapTableFactory.class);
public static final OlapTableFactory INSTANCE = new OlapTableFactory();
private OlapTableFactory() {
}
private void processConstraint(
Database db, OlapTable olapTable, Map<String, String> properties) throws AnalysisException {
List<UniqueConstraint> uniqueConstraints = PropertyAnalyzer.analyzeUniqueConstraint(properties, db, olapTable);
if (uniqueConstraints != null) {
olapTable.setUniqueConstraints(uniqueConstraints);
}
List<ForeignKeyConstraint> foreignKeyConstraints =
PropertyAnalyzer.analyzeForeignKeyConstraint(properties, db, olapTable);
if (foreignKeyConstraints != null) {
olapTable.setForeignKeyConstraints(foreignKeyConstraints);
}
}
} | class OlapTableFactory implements AbstractTableFactory {
private static final Logger LOG = LogManager.getLogger(OlapTableFactory.class);
public static final OlapTableFactory INSTANCE = new OlapTableFactory();
private OlapTableFactory() {
}
private void processConstraint(
Database db, OlapTable olapTable, Map<String, String> properties) throws AnalysisException {
List<UniqueConstraint> uniqueConstraints = PropertyAnalyzer.analyzeUniqueConstraint(properties, db, olapTable);
if (uniqueConstraints != null) {
olapTable.setUniqueConstraints(uniqueConstraints);
}
List<ForeignKeyConstraint> foreignKeyConstraints =
PropertyAnalyzer.analyzeForeignKeyConstraint(properties, db, olapTable);
if (foreignKeyConstraints != null) {
olapTable.setForeignKeyConstraints(foreignKeyConstraints);
}
}
} |
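The visible difference between the two createTable versions above is how the storage-volume binding is handled: the revised version binds the table to its storage volume while the LakeTable is being set up and unbinds it again if a later step throws DdlException, instead of binding only once everything has succeeded. A minimal sketch of that bind-early / unbind-on-failure shape, using hypothetical stand-in types rather than the actual StarRocks classes:

// Hypothetical stand-ins; the real code uses StorageVolumeMgr and DdlException from StarRocks.
interface VolumeBinder {
    boolean bind(String volumeId, long tableId);

    void unbind(long tableId);
}

class BindExample {
    static void createWithCleanup(VolumeBinder binder, String volumeId, long tableId) throws Exception {
        // Bind up front so the rest of the setup can assume the mapping exists.
        if (!binder.bind(volumeId, tableId)) {
            throw new Exception("Storage volume " + volumeId + " does not exist");
        }
        try {
            // ... remaining table setup that may throw ...
        } catch (Exception e) {
            // Roll the binding back so a failed create leaves no orphaned mapping behind.
            binder.unbind(tableId);
            throw e;
        }
    }
}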
It can be done. But that would require altering all the tests that use this constant value. Will that be fine if we do that? | public void testEmptyLzoProgress() throws IOException {
File tmpFile = tmpFolder.newFile("empty.lzo_deflate");
String filename = tmpFile.toPath().toString();
writeFile(tmpFile, new byte[0], CompressionMode.LZO);
PipelineOptions options = PipelineOptionsFactory.create();
CompressedSource<Byte> source =
CompressedSource.from(new ByteSource(filename, 1)).withDecompression(CompressionMode.LZO);
try (BoundedReader<Byte> readerOrig = source.createReader(options)) {
assertThat(readerOrig, instanceOf(CompressedReader.class));
CompressedReader<Byte> reader = (CompressedReader<Byte>) readerOrig;
assertEquals(0.0, reader.getFractionConsumed(), 1e-6);
assertEquals(0, reader.getSplitPointsConsumed());
assertEquals(1, reader.getSplitPointsRemaining());
assertFalse(reader.start());
assertEquals(1.0, reader.getFractionConsumed(), 1e-6);
assertEquals(0, reader.getSplitPointsConsumed());
assertEquals(0, reader.getSplitPointsRemaining());
}
} | assertEquals(0.0, reader.getFractionConsumed(), 1e-6); | public void testEmptyLzoProgress() throws IOException {
File tmpFile = tmpFolder.newFile("empty.lzo_deflate");
String filename = tmpFile.toPath().toString();
writeFile(tmpFile, new byte[0], CompressionMode.LZO);
PipelineOptions options = PipelineOptionsFactory.create();
CompressedSource<Byte> source =
CompressedSource.from(new ByteSource(filename, 1)).withDecompression(CompressionMode.LZO);
try (BoundedReader<Byte> readerOrig = source.createReader(options)) {
assertThat(readerOrig, instanceOf(CompressedReader.class));
CompressedReader<Byte> reader = (CompressedReader<Byte>) readerOrig;
assertEquals(0.0, reader.getFractionConsumed(), delta);
assertEquals(0, reader.getSplitPointsConsumed());
assertEquals(1, reader.getSplitPointsRemaining());
assertFalse(reader.start());
assertEquals(1.0, reader.getFractionConsumed(), delta);
assertEquals(0, reader.getSplitPointsConsumed());
assertEquals(0, reader.getSplitPointsRemaining());
}
} | class ExtractIndexFromTimestamp extends DoFn<Byte, KV<Long, Byte>> {
@ProcessElement
public void processElement(ProcessContext context) {
context.output(KV.of(context.timestamp().getMillis(), context.element()));
}
} | class ExtractIndexFromTimestamp extends DoFn<Byte, KV<Long, Byte>> {
@ProcessElement
public void processElement(ProcessContext context) {
context.output(KV.of(context.timestamp().getMillis(), context.element()));
}
} |
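The testEmptyLzoProgress example replaces the literal 1e-6 tolerance with a shared delta, and the review comment notes that the other tests using the constant would need the same change. The declaration of delta is not part of the excerpt; presumably it is a class-level field along these lines (the field name and visibility here are assumptions, not taken from the Beam source):

import static org.junit.Assert.assertEquals;

public class ToleranceExample {
    // Assumed shared tolerance; every assertion on getFractionConsumed() reuses it,
    // so tightening or loosening the precision later only touches one declaration.
    private static final double delta = 1e-6;

    void checkFraction(double expected, double fractionConsumed) {
        assertEquals(expected, fractionConsumed, delta);
    }
}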
Was this meant to be included in this PR? Doesn't seem related | public NativeImageInvokerInfo build() {
List<String> nativeImageArgs = new ArrayList<>();
boolean enableSslNative = false;
boolean enableAllSecurityServices = nativeConfig.enableAllSecurityServices;
boolean inlineBeforeAnalysis = nativeConfig.inlineBeforeAnalysis;
boolean addAllCharsets = nativeConfig.addAllCharsets;
boolean enableHttpsUrlHandler = nativeConfig.enableHttpsUrlHandler;
for (NativeImageSystemPropertyBuildItem prop : nativeImageProperties) {
if (prop.getKey().equals("quarkus.ssl.native") && prop.getValue() != null) {
enableSslNative = Boolean.parseBoolean(prop.getValue());
} else if (prop.getKey().equals("quarkus.jni.enable") && prop.getValue().equals("false")) {
log.warn("Your application is setting the deprecated 'quarkus.jni.enable' configuration key to false."
+ " Please consider removing this configuration key as it is ignored (JNI is always enabled) and it"
+ " will be removed in a future Quarkus version.");
} else if (prop.getKey().equals("quarkus.native.enable-all-security-services") && prop.getValue() != null) {
enableAllSecurityServices |= Boolean.parseBoolean(prop.getValue());
} else if (prop.getKey().equals("quarkus.native.enable-all-charsets") && prop.getValue() != null) {
addAllCharsets |= Boolean.parseBoolean(prop.getValue());
} else if (prop.getKey().equals("quarkus.native.inline-before-analysis") && prop.getValue() != null) {
inlineBeforeAnalysis |= Boolean.parseBoolean(prop.getValue());
} else {
if (prop.getValue() == null) {
nativeImageArgs.add("-J-D" + prop.getKey());
} else {
nativeImageArgs.add("-J-D" + prop.getKey() + "=" + prop.getValue());
}
}
}
if (nativeConfig.userLanguage.isPresent()) {
nativeImageArgs.add("-J-Duser.language=" + nativeConfig.userLanguage.get());
}
if (nativeConfig.userCountry.isPresent()) {
nativeImageArgs.add("-J-Duser.country=" + nativeConfig.userCountry.get());
}
nativeImageArgs.add("-J-Dfile.encoding=" + nativeConfig.fileEncoding);
if (enableSslNative) {
enableHttpsUrlHandler = true;
enableAllSecurityServices = true;
}
handleAdditionalProperties(nativeConfig, nativeImageArgs, isContainerBuild, outputDir);
nativeImageArgs.add(
"-H:InitialCollectionPolicy=com.oracle.svm.core.genscavenge.CollectionPolicy$BySpaceAndTime");
nativeImageArgs.add("-H:+JNI");
nativeImageArgs.add("-H:+AllowFoldMethods");
nativeImageArgs.add("-jar");
nativeImageArgs.add(runnerJarName);
if (nativeConfig.enableFallbackImages) {
nativeImageArgs.add("-H:FallbackThreshold=5");
} else {
nativeImageArgs.add("-H:FallbackThreshold=0");
}
if (nativeConfig.reportErrorsAtRuntime) {
nativeImageArgs.add("-H:+ReportUnsupportedElementsAtRuntime");
}
if (nativeConfig.reportExceptionStackTraces) {
nativeImageArgs.add("-H:+ReportExceptionStackTraces");
}
if (nativeConfig.debug.enabled) {
nativeImageArgs.add("-g");
nativeImageArgs.add("-H:DebugInfoSourceSearchPath=" + APP_SOURCES);
}
if (nativeConfig.debugBuildProcess) {
nativeImageArgs
.add("-J-Xrunjdwp:transport=dt_socket,address=" + DEBUG_BUILD_PROCESS_PORT + ",server=y,suspend=y");
}
if (nativeConfig.enableReports) {
nativeImageArgs.add("-H:+PrintAnalysisCallTree");
}
if (nativeConfig.dumpProxies) {
nativeImageArgs.add("-Dsun.misc.ProxyGenerator.saveGeneratedFiles=true");
}
if (nativeConfig.nativeImageXmx.isPresent()) {
nativeImageArgs.add("-J-Xmx" + nativeConfig.nativeImageXmx.get());
}
List<String> protocols = new ArrayList<>(2);
if (nativeConfig.enableHttpUrlHandler) {
protocols.add("http");
}
if (enableHttpsUrlHandler) {
protocols.add("https");
}
if (addAllCharsets) {
nativeImageArgs.add("-H:+AddAllCharsets");
} else {
nativeImageArgs.add("-H:-AddAllCharsets");
}
if (!protocols.isEmpty()) {
nativeImageArgs.add("-H:EnableURLProtocols=" + String.join(",", protocols));
}
if (enableAllSecurityServices && graalVMVersion.isOlderThan(GraalVM.Version.VERSION_21_1)) {
nativeImageArgs.add("--enable-all-security-services");
}
if (inlineBeforeAnalysis) {
if (graalVMVersion.isNewerThan(GraalVM.Version.VERSION_20_3)) {
nativeImageArgs.add("-H:+InlineBeforeAnalysis");
} else {
log.warn(
"The InlineBeforeAnalysis feature is not supported in GraalVM versions prior to 21.0.0."
+ " InlineBeforeAnalysis will thus not be enabled, please consider using a newer"
+ " GraalVM version if your application relies on this feature.");
}
}
if (!noPIE.isEmpty()) {
nativeImageArgs.add("-H:NativeLinkerOption=" + noPIE);
}
if (!nativeConfig.enableIsolates) {
nativeImageArgs.add("-H:-SpawnIsolates");
}
if (!nativeConfig.enableJni) {
log.warn(
"Your application is setting the deprecated 'quarkus.native.enable-jni' configuration key to false."
+ " Please consider removing this configuration key as it is ignored (JNI is always enabled) and it"
+ " will be removed in a future Quarkus version.");
}
if (nativeConfig.enableServer) {
log.warn(
"Your application is setting the deprecated 'quarkus.native.enable-server' configuration key to true."
+ " Please consider removing this configuration key as it is ignored"
+ " (The Native image build server is always disabled) and it"
+ " will be removed in a future Quarkus version.");
}
if (nativeConfig.enableVmInspection) {
nativeImageArgs.add("-H:+AllowVMInspection");
}
if (nativeConfig.autoServiceLoaderRegistration) {
nativeImageArgs.add("-H:+UseServiceLoaderFeature");
nativeImageArgs.add("-H:+TraceServiceLoaderFeature");
} else {
nativeImageArgs.add("-H:-UseServiceLoaderFeature");
}
if (nativeConfig.fullStackTraces) {
nativeImageArgs.add("-H:+StackTrace");
} else {
nativeImageArgs.add("-H:-StackTrace");
}
if (nativeConfig.enableDashboardDump) {
nativeImageArgs.add("-H:DashboardDump=" + outputTargetBuildItem.getBaseName() + "_dashboard.dump");
nativeImageArgs.add("-H:+DashboardAll");
}
if (graalVMVersion.isNewerThan(GraalVM.Version.VERSION_21_1)) {
nativeImageArgs.add("-H:-ParseOnce");
if (!nativeImageSecurityProviders.isEmpty()) {
String additionalSecurityProviders = nativeImageSecurityProviders.stream()
.map(NativeImageSecurityProviderBuildItem::getSecurityProvider)
.collect(Collectors.joining(","));
nativeImageArgs.add("-H:AdditionalSecurityProviders=" + additionalSecurityProviders);
}
for (ExcludeConfigBuildItem excludeConfig : excludeConfigs) {
nativeImageArgs.add("--exclude-config");
nativeImageArgs.add(excludeConfig.getJarFile());
nativeImageArgs.add(excludeConfig.getResourceName());
}
}
nativeImageArgs.add(nativeImageName);
return new NativeImageInvokerInfo(nativeImageArgs);
} | if (!nativeImageSecurityProviders.isEmpty()) { | public NativeImageInvokerInfo build() {
List<String> nativeImageArgs = new ArrayList<>();
boolean enableSslNative = false;
boolean enableAllSecurityServices = nativeConfig.enableAllSecurityServices;
boolean inlineBeforeAnalysis = nativeConfig.inlineBeforeAnalysis;
boolean addAllCharsets = nativeConfig.addAllCharsets;
boolean enableHttpsUrlHandler = nativeConfig.enableHttpsUrlHandler;
for (NativeImageSystemPropertyBuildItem prop : nativeImageProperties) {
if (prop.getKey().equals("quarkus.ssl.native") && prop.getValue() != null) {
enableSslNative = Boolean.parseBoolean(prop.getValue());
} else if (prop.getKey().equals("quarkus.jni.enable") && prop.getValue().equals("false")) {
log.warn("Your application is setting the deprecated 'quarkus.jni.enable' configuration key to false."
+ " Please consider removing this configuration key as it is ignored (JNI is always enabled) and it"
+ " will be removed in a future Quarkus version.");
} else if (prop.getKey().equals("quarkus.native.enable-all-security-services") && prop.getValue() != null) {
enableAllSecurityServices |= Boolean.parseBoolean(prop.getValue());
} else if (prop.getKey().equals("quarkus.native.enable-all-charsets") && prop.getValue() != null) {
addAllCharsets |= Boolean.parseBoolean(prop.getValue());
} else if (prop.getKey().equals("quarkus.native.inline-before-analysis") && prop.getValue() != null) {
inlineBeforeAnalysis |= Boolean.parseBoolean(prop.getValue());
} else {
if (prop.getValue() == null) {
nativeImageArgs.add("-J-D" + prop.getKey());
} else {
nativeImageArgs.add("-J-D" + prop.getKey() + "=" + prop.getValue());
}
}
}
if (nativeConfig.userLanguage.isPresent()) {
nativeImageArgs.add("-J-Duser.language=" + nativeConfig.userLanguage.get());
}
if (nativeConfig.userCountry.isPresent()) {
nativeImageArgs.add("-J-Duser.country=" + nativeConfig.userCountry.get());
}
nativeImageArgs.add("-J-Dfile.encoding=" + nativeConfig.fileEncoding);
if (enableSslNative) {
enableHttpsUrlHandler = true;
enableAllSecurityServices = true;
}
handleAdditionalProperties(nativeConfig, nativeImageArgs, isContainerBuild, outputDir);
nativeImageArgs.add(
"-H:InitialCollectionPolicy=com.oracle.svm.core.genscavenge.CollectionPolicy$BySpaceAndTime");
nativeImageArgs.add("-H:+JNI");
nativeImageArgs.add("-H:+AllowFoldMethods");
nativeImageArgs.add("-jar");
nativeImageArgs.add(runnerJarName);
if (nativeConfig.enableFallbackImages) {
nativeImageArgs.add("-H:FallbackThreshold=5");
} else {
nativeImageArgs.add("-H:FallbackThreshold=0");
}
if (nativeConfig.reportErrorsAtRuntime) {
nativeImageArgs.add("-H:+ReportUnsupportedElementsAtRuntime");
}
if (nativeConfig.reportExceptionStackTraces) {
nativeImageArgs.add("-H:+ReportExceptionStackTraces");
}
if (nativeConfig.debug.enabled) {
nativeImageArgs.add("-g");
nativeImageArgs.add("-H:DebugInfoSourceSearchPath=" + APP_SOURCES);
}
if (nativeConfig.debugBuildProcess) {
nativeImageArgs
.add("-J-Xrunjdwp:transport=dt_socket,address=" + DEBUG_BUILD_PROCESS_PORT + ",server=y,suspend=y");
}
if (nativeConfig.enableReports) {
nativeImageArgs.add("-H:+PrintAnalysisCallTree");
}
if (nativeConfig.dumpProxies) {
nativeImageArgs.add("-Dsun.misc.ProxyGenerator.saveGeneratedFiles=true");
}
if (nativeConfig.nativeImageXmx.isPresent()) {
nativeImageArgs.add("-J-Xmx" + nativeConfig.nativeImageXmx.get());
}
List<String> protocols = new ArrayList<>(2);
if (nativeConfig.enableHttpUrlHandler) {
protocols.add("http");
}
if (enableHttpsUrlHandler) {
protocols.add("https");
}
if (addAllCharsets) {
nativeImageArgs.add("-H:+AddAllCharsets");
} else {
nativeImageArgs.add("-H:-AddAllCharsets");
}
if (!protocols.isEmpty()) {
nativeImageArgs.add("-H:EnableURLProtocols=" + String.join(",", protocols));
}
if (enableAllSecurityServices && graalVMVersion.isOlderThan(GraalVM.Version.VERSION_21_1)) {
nativeImageArgs.add("--enable-all-security-services");
}
if (inlineBeforeAnalysis) {
if (graalVMVersion.isNewerThan(GraalVM.Version.VERSION_20_3)) {
nativeImageArgs.add("-H:+InlineBeforeAnalysis");
} else {
log.warn(
"The InlineBeforeAnalysis feature is not supported in GraalVM versions prior to 21.0.0."
+ " InlineBeforeAnalysis will thus not be enabled, please consider using a newer"
+ " GraalVM version if your application relies on this feature.");
}
}
if (!noPIE.isEmpty()) {
nativeImageArgs.add("-H:NativeLinkerOption=" + noPIE);
}
if (!nativeConfig.enableIsolates) {
nativeImageArgs.add("-H:-SpawnIsolates");
}
if (!nativeConfig.enableJni) {
log.warn(
"Your application is setting the deprecated 'quarkus.native.enable-jni' configuration key to false."
+ " Please consider removing this configuration key as it is ignored (JNI is always enabled) and it"
+ " will be removed in a future Quarkus version.");
}
if (nativeConfig.enableServer) {
log.warn(
"Your application is setting the deprecated 'quarkus.native.enable-server' configuration key to true."
+ " Please consider removing this configuration key as it is ignored"
+ " (The Native image build server is always disabled) and it"
+ " will be removed in a future Quarkus version.");
}
if (nativeConfig.enableVmInspection) {
nativeImageArgs.add("-H:+AllowVMInspection");
}
if (nativeConfig.autoServiceLoaderRegistration) {
nativeImageArgs.add("-H:+UseServiceLoaderFeature");
nativeImageArgs.add("-H:+TraceServiceLoaderFeature");
} else {
nativeImageArgs.add("-H:-UseServiceLoaderFeature");
}
if (nativeConfig.fullStackTraces) {
nativeImageArgs.add("-H:+StackTrace");
} else {
nativeImageArgs.add("-H:-StackTrace");
}
if (nativeConfig.enableDashboardDump) {
nativeImageArgs.add("-H:DashboardDump=" + outputTargetBuildItem.getBaseName() + "_dashboard.dump");
nativeImageArgs.add("-H:+DashboardAll");
}
if (graalVMVersion.isNewerThan(GraalVM.Version.VERSION_21_1)) {
nativeImageArgs.add("-H:-ParseOnce");
if (nativeImageSecurityProviders != null && !nativeImageSecurityProviders.isEmpty()) {
String additionalSecurityProviders = nativeImageSecurityProviders.stream()
.map(p -> p.getSecurityProvider())
.collect(Collectors.joining(","));
nativeImageArgs.add("-H:AdditionalSecurityProviders=" + additionalSecurityProviders);
}
for (ExcludeConfigBuildItem excludeConfig : excludeConfigs) {
nativeImageArgs.add("--exclude-config");
nativeImageArgs.add(excludeConfig.getJarFile());
nativeImageArgs.add(excludeConfig.getResourceName());
}
}
nativeImageArgs.add(nativeImageName);
return new NativeImageInvokerInfo(nativeImageArgs);
} | class Builder {
private NativeConfig nativeConfig;
private OutputTargetBuildItem outputTargetBuildItem;
private List<NativeImageSystemPropertyBuildItem> nativeImageProperties;
private List<ExcludeConfigBuildItem> excludeConfigs;
private List<NativeImageSecurityProviderBuildItem> nativeImageSecurityProviders;
private Path outputDir;
private String runnerJarName;
private String noPIE = "";
private boolean isContainerBuild = false;
private GraalVM.Version graalVMVersion = GraalVM.Version.UNVERSIONED;
private String nativeImageName;
public Builder setNativeConfig(NativeConfig nativeConfig) {
this.nativeConfig = nativeConfig;
return this;
}
public Builder setOutputTargetBuildItem(OutputTargetBuildItem outputTargetBuildItem) {
this.outputTargetBuildItem = outputTargetBuildItem;
return this;
}
public Builder setNativeImageProperties(List<NativeImageSystemPropertyBuildItem> nativeImageProperties) {
this.nativeImageProperties = nativeImageProperties;
return this;
}
public Builder setExcludeConfigs(List<ExcludeConfigBuildItem> excludeConfigs) {
this.excludeConfigs = excludeConfigs;
return this;
}
public Builder setNativeImageSecurityProviders(
List<NativeImageSecurityProviderBuildItem> nativeImageSecurityProviders) {
this.nativeImageSecurityProviders = nativeImageSecurityProviders;
return this;
}
public Builder setOutputDir(Path outputDir) {
this.outputDir = outputDir;
return this;
}
public Builder setRunnerJarName(String runnerJarName) {
this.runnerJarName = runnerJarName;
return this;
}
public Builder setNoPIE(String noPIE) {
this.noPIE = noPIE;
return this;
}
public Builder setContainerBuild(boolean containerBuild) {
isContainerBuild = containerBuild;
return this;
}
public Builder setGraalVMVersion(GraalVM.Version graalVMVersion) {
this.graalVMVersion = graalVMVersion;
return this;
}
public Builder setNativeImageName(String nativeImageName) {
this.nativeImageName = nativeImageName;
return this;
}
private void handleAdditionalProperties(NativeConfig nativeConfig, List<String> command, boolean isContainerBuild,
Path outputDir) {
if (nativeConfig.additionalBuildArgs.isPresent()) {
List<String> strings = nativeConfig.additionalBuildArgs.get();
for (String buildArg : strings) {
String trimmedBuildArg = buildArg.trim();
if (trimmedBuildArg.contains(TRUST_STORE_SYSTEM_PROPERTY_MARKER) && isContainerBuild) {
/*
* When the native binary is being built with a docker container, because a volume is created,
* we need to copy the trustStore file into the output directory (which is the root of volume)
* and change the value of 'javax.net.ssl.trustStore' property to point to this value
*
* TODO: we might want to introduce a dedicated property in order to overcome this ugliness
*/
int index = trimmedBuildArg.indexOf(TRUST_STORE_SYSTEM_PROPERTY_MARKER);
if (trimmedBuildArg.length() > index + 2) {
String configuredTrustStorePath = trimmedBuildArg
.substring(index + TRUST_STORE_SYSTEM_PROPERTY_MARKER.length());
try {
IoUtils.copy(Paths.get(configuredTrustStorePath),
outputDir.resolve(MOVED_TRUST_STORE_NAME));
command.add(trimmedBuildArg.substring(0, index) + TRUST_STORE_SYSTEM_PROPERTY_MARKER
+ CONTAINER_BUILD_VOLUME_PATH + "/" + MOVED_TRUST_STORE_NAME);
} catch (IOException e) {
throw new UncheckedIOException("Unable to copy trustStore file '" + configuredTrustStorePath
+ "' to volume root directory '" + outputDir.toAbsolutePath().toString() + "'", e);
}
}
} else {
command.add(trimmedBuildArg);
}
}
}
}
} | class Builder {
private NativeConfig nativeConfig;
private OutputTargetBuildItem outputTargetBuildItem;
private List<NativeImageSystemPropertyBuildItem> nativeImageProperties;
private List<ExcludeConfigBuildItem> excludeConfigs;
private List<NativeImageSecurityProviderBuildItem> nativeImageSecurityProviders;
private Path outputDir;
private String runnerJarName;
private String noPIE = "";
private boolean isContainerBuild = false;
private GraalVM.Version graalVMVersion = GraalVM.Version.UNVERSIONED;
private String nativeImageName;
public Builder setNativeConfig(NativeConfig nativeConfig) {
this.nativeConfig = nativeConfig;
return this;
}
public Builder setOutputTargetBuildItem(OutputTargetBuildItem outputTargetBuildItem) {
this.outputTargetBuildItem = outputTargetBuildItem;
return this;
}
public Builder setNativeImageProperties(List<NativeImageSystemPropertyBuildItem> nativeImageProperties) {
this.nativeImageProperties = nativeImageProperties;
return this;
}
public Builder setExcludeConfigs(List<ExcludeConfigBuildItem> excludeConfigs) {
this.excludeConfigs = excludeConfigs;
return this;
}
public Builder setNativeImageSecurityProviders(
List<NativeImageSecurityProviderBuildItem> nativeImageSecurityProviders) {
this.nativeImageSecurityProviders = nativeImageSecurityProviders;
return this;
}
public Builder setOutputDir(Path outputDir) {
this.outputDir = outputDir;
return this;
}
public Builder setRunnerJarName(String runnerJarName) {
this.runnerJarName = runnerJarName;
return this;
}
public Builder setNoPIE(String noPIE) {
this.noPIE = noPIE;
return this;
}
public Builder setContainerBuild(boolean containerBuild) {
isContainerBuild = containerBuild;
return this;
}
public Builder setGraalVMVersion(GraalVM.Version graalVMVersion) {
this.graalVMVersion = graalVMVersion;
return this;
}
public Builder setNativeImageName(String nativeImageName) {
this.nativeImageName = nativeImageName;
return this;
}
private void handleAdditionalProperties(NativeConfig nativeConfig, List<String> command, boolean isContainerBuild,
Path outputDir) {
if (nativeConfig.additionalBuildArgs.isPresent()) {
List<String> strings = nativeConfig.additionalBuildArgs.get();
for (String buildArg : strings) {
String trimmedBuildArg = buildArg.trim();
if (trimmedBuildArg.contains(TRUST_STORE_SYSTEM_PROPERTY_MARKER) && isContainerBuild) {
/*
* When the native binary is being built with a docker container, because a volume is created,
* we need to copy the trustStore file into the output directory (which is the root of volume)
* and change the value of 'javax.net.ssl.trustStore' property to point to this value
*
* TODO: we might want to introduce a dedicated property in order to overcome this ugliness
*/
int index = trimmedBuildArg.indexOf(TRUST_STORE_SYSTEM_PROPERTY_MARKER);
if (trimmedBuildArg.length() > index + 2) {
String configuredTrustStorePath = trimmedBuildArg
.substring(index + TRUST_STORE_SYSTEM_PROPERTY_MARKER.length());
try {
IoUtils.copy(Paths.get(configuredTrustStorePath),
outputDir.resolve(MOVED_TRUST_STORE_NAME));
command.add(trimmedBuildArg.substring(0, index) + TRUST_STORE_SYSTEM_PROPERTY_MARKER
+ CONTAINER_BUILD_VOLUME_PATH + "/" + MOVED_TRUST_STORE_NAME);
} catch (IOException e) {
throw new UncheckedIOException("Unable to copy trustStore file '" + configuredTrustStorePath
+ "' to volume root directory '" + outputDir.toAbsolutePath().toString() + "'", e);
}
}
} else {
command.add(trimmedBuildArg);
}
}
}
}
} |
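In the build() example above, the functional change under discussion is the added null guard on nativeImageSecurityProviders before calling isEmpty() (the method reference also becomes a lambda). An alternative that keeps call sites free of null checks is to normalize the list to empty when it is set; the following is only an illustrative sketch, not the actual Quarkus builder:

import java.util.Collections;
import java.util.List;

class ProvidersHolder {
    private List<String> securityProviders = Collections.emptyList();

    // Normalizing null to an empty list in the setter keeps call sites free of null checks:
    // !securityProviders.isEmpty() is then always safe.
    ProvidersHolder setSecurityProviders(List<String> providers) {
        this.securityProviders = providers == null ? Collections.emptyList() : providers;
        return this;
    }

    boolean hasProviders() {
        return !securityProviders.isEmpty();
    }
}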
I think this should be changed to get/setOptionalParticipation() | public By withOptionalParticipation() {
return toBuilder().setOuterJoinParticipation(true).build();
} | return toBuilder().setOuterJoinParticipation(true).build(); | public By withOptionalParticipation() {
return toBuilder().setOptionalParticipation(true).build();
} | class Builder {
abstract Builder setFieldAccessDescriptor(FieldAccessDescriptor fieldAccessDescriptor);
abstract Builder setOuterJoinParticipation(boolean outerJoinParticipation);
abstract By build();
} | class Builder {
abstract Builder setFieldAccessDescriptor(FieldAccessDescriptor fieldAccessDescriptor);
abstract Builder setOptionalParticipation(boolean optionalParticipation);
abstract By build();
} |
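The withOptionalParticipation example renames the builder setter from setOuterJoinParticipation to setOptionalParticipation, as the review comment suggests. Only the abstract Builder is shown in the excerpt; in an AutoValue-style value class the setter would normally be paired with a matching abstract property getter. A hedged sketch of that pairing, with illustrative names rather than the actual Beam declarations:

// Illustrative, hand-rolled version of the value-class shape; not the real By class.
abstract class ByExample {
    // Property getter paired with the renamed setter below.
    abstract boolean getOptionalParticipation();

    abstract Builder toBuilder();

    ByExample withOptionalParticipation() {
        // Rebuild with the flag enabled, mirroring the method in the excerpt above.
        return toBuilder().setOptionalParticipation(true).build();
    }

    abstract static class Builder {
        abstract Builder setOptionalParticipation(boolean optionalParticipation);

        abstract ByExample build();
    }
}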
I just want to have this check in one place so it's easy to change if needed in the future. I could change this line to `configserverConfig.cloud().equals("aws") || configserverConfig.cloud().equals("gcp")` if you think that's better? | public Zone(ConfigserverConfig configserverConfig, CloudConfig cloudConfig) {
this(Cloud.builder()
.name(CloudName.from(configserverConfig.cloud()))
.dynamicProvisioning(cloudConfig.dynamicProvisioning())
.allowHostSharing(cloudConfig.allowHostSharing())
.allowEnclave(cloudConfig.dynamicProvisioning())
.requireAccessControl(cloudConfig.requireAccessControl())
.account(CloudAccount.from(cloudConfig.account()))
.build(),
SystemName.from(configserverConfig.system()),
Environment.from(configserverConfig.environment()),
RegionName.from(configserverConfig.region()));
} | .allowEnclave(cloudConfig.dynamicProvisioning()) | public Zone(ConfigserverConfig configserverConfig, CloudConfig cloudConfig) {
this(Cloud.builder()
.name(CloudName.from(configserverConfig.cloud()))
.dynamicProvisioning(cloudConfig.dynamicProvisioning())
.allowHostSharing(cloudConfig.allowHostSharing())
.allowEnclave(cloudConfig.dynamicProvisioning())
.requireAccessControl(cloudConfig.requireAccessControl())
.account(CloudAccount.from(cloudConfig.account()))
.build(),
SystemName.from(configserverConfig.system()),
Environment.from(configserverConfig.environment()),
RegionName.from(configserverConfig.region()));
} | class Zone {
private final Cloud cloud;
private final SystemName systemName;
private final Environment environment;
private final RegionName region;
/** Create from environment and region. Use for testing. */
public Zone(Environment environment, RegionName region) {
this(SystemName.defaultSystem(), environment, region);
}
/** Create from system, environment and region. Use for testing. */
public Zone(SystemName systemName, Environment environment, RegionName region) {
this(Cloud.defaultCloud(), systemName, environment, region);
}
/** Create from cloud, system, environment and region. Also used for testing. */
public Zone(Cloud cloud, SystemName systemName, Environment environment, RegionName region) {
this.cloud = cloud;
this.systemName = systemName;
this.environment = environment;
this.region = region;
}
public Cloud getCloud() {
return cloud();
}
/** Returns the current cloud */
public Cloud cloud() { return cloud; }
/** Returns the current system */
public SystemName system() { return systemName; }
/** Returns the current environment */
public Environment environment() {
return environment;
}
/** Returns the current region */
public RegionName region() {
return region;
}
/** Returns the string "environment.region" */
public String systemLocalValue() { return environment + "." + region; }
/** Do not use */
public static Zone defaultZone() {
return new Zone(Cloud.defaultCloud(), SystemName.defaultSystem(), Environment.defaultEnvironment(), RegionName.defaultName());
}
@Override
public String toString() {
return "zone " + environment + "." + region;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof Zone)) return false;
Zone zone = (Zone) o;
return environment == zone.environment &&
Objects.equals(region, zone.region);
}
@Override
public int hashCode() {
return Objects.hash(environment, region);
}
} | class Zone {
private final Cloud cloud;
private final SystemName systemName;
private final Environment environment;
private final RegionName region;
/** Create from environment and region. Use for testing. */
public Zone(Environment environment, RegionName region) {
this(SystemName.defaultSystem(), environment, region);
}
/** Create from system, environment and region. Use for testing. */
public Zone(SystemName systemName, Environment environment, RegionName region) {
this(Cloud.defaultCloud(), systemName, environment, region);
}
/** Create from cloud, system, environment and region. Also used for testing. */
public Zone(Cloud cloud, SystemName systemName, Environment environment, RegionName region) {
this.cloud = cloud;
this.systemName = systemName;
this.environment = environment;
this.region = region;
}
public Cloud getCloud() {
return cloud();
}
/** Returns the current cloud */
public Cloud cloud() { return cloud; }
/** Returns the current system */
public SystemName system() { return systemName; }
/** Returns the current environment */
public Environment environment() {
return environment;
}
/** Returns the current region */
public RegionName region() {
return region;
}
/** Returns the string "environment.region" */
public String systemLocalValue() { return environment + "." + region; }
/** Do not use */
public static Zone defaultZone() {
return new Zone(Cloud.defaultCloud(), SystemName.defaultSystem(), Environment.defaultEnvironment(), RegionName.defaultName());
}
@Override
public String toString() {
return "zone " + environment + "." + region;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof Zone)) return false;
Zone zone = (Zone) o;
return environment == zone.environment &&
Objects.equals(region, zone.region);
}
@Override
public int hashCode() {
return Objects.hash(environment, region);
}
} |
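The review comment on the Zone constructor argues for keeping the enclave decision in one place: allowEnclave is derived from the single dynamicProvisioning flag instead of repeating cloud-name comparisons such as cloud().equals("aws") || cloud().equals("gcp") at every point where the decision is needed. A small sketch contrasting the two shapes, with simplified signatures that are not the actual Vespa classes:

class EnclavePolicyExample {
    // Centralized: the decision is computed once from a single configuration flag,
    // which is what allowEnclave(cloudConfig.dynamicProvisioning()) does above.
    static boolean allowEnclaveFromConfig(boolean dynamicProvisioning) {
        return dynamicProvisioning;
    }

    // Scattered alternative the reviewer wants to avoid: every caller repeats the
    // cloud-name comparison, so supporting a new cloud means touching each call site.
    static boolean allowEnclaveFromCloudNames(String cloudName) {
        return cloudName.equals("aws") || cloudName.equals("gcp");
    }
}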
Replace this with a switch case | private void checkDuplicateVarRefs(List<BLangExpression> varRefs, Set<BSymbol> symbols) {
for (BLangExpression varRef : varRefs) {
NodeKind kind = varRef.getKind();
if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) varRef;
if (simpleVarRef.symbol != null && !symbols.add(simpleVarRef.symbol)) {
this.dlog.error(varRef.pos, DiagnosticErrorCode.DUPLICATE_VARIABLE_IN_BINDING_PATTERN,
simpleVarRef.symbol);
}
} else if (kind == NodeKind.RECORD_VARIABLE_REF) {
checkDuplicateVarRefs(getVarRefs((BLangRecordVarRef) varRef), symbols);
} else if (kind == NodeKind.ERROR_VARIABLE_REF) {
checkDuplicateVarRefs(getVarRefs((BLangErrorVarRef) varRef), symbols);
} else if (kind == NodeKind.TUPLE_VARIABLE_REF) {
checkDuplicateVarRefs(getVarRefs((BLangTupleVarRef) varRef), symbols);
}
}
} | if (kind == NodeKind.SIMPLE_VARIABLE_REF) { | private void checkDuplicateVarRefs(List<BLangExpression> varRefs, Set<BSymbol> symbols) {
for (BLangExpression varRef : varRefs) {
NodeKind kind = varRef.getKind();
switch (kind) {
case SIMPLE_VARIABLE_REF:
BLangSimpleVarRef simpleVarRef = (BLangSimpleVarRef) varRef;
if (simpleVarRef.symbol != null && !symbols.add(simpleVarRef.symbol)) {
this.dlog.error(varRef.pos, DiagnosticErrorCode.DUPLICATE_VARIABLE_IN_BINDING_PATTERN,
simpleVarRef.symbol);
}
break;
case RECORD_VARIABLE_REF:
checkDuplicateVarRefs(getVarRefs((BLangRecordVarRef) varRef), symbols);
break;
case ERROR_VARIABLE_REF:
checkDuplicateVarRefs(getVarRefs((BLangErrorVarRef) varRef), symbols);
break;
case TUPLE_VARIABLE_REF:
checkDuplicateVarRefs(getVarRefs((BLangTupleVarRef) varRef), symbols);
break;
default:
}
}
} | class CodeAnalyzer extends SimpleBLangNodeAnalyzer<CodeAnalyzer.AnalyzerData> {
private static final CompilerContext.Key<CodeAnalyzer> CODE_ANALYZER_KEY =
new CompilerContext.Key<>();
private final SymbolResolver symResolver;
private final SymbolTable symTable;
private final Types types;
private final BLangDiagnosticLog dlog;
private final TypeChecker typeChecker;
private final Names names;
private final ReachabilityAnalyzer reachabilityAnalyzer;
public static CodeAnalyzer getInstance(CompilerContext context) {
CodeAnalyzer codeGenerator = context.get(CODE_ANALYZER_KEY);
if (codeGenerator == null) {
codeGenerator = new CodeAnalyzer(context);
}
return codeGenerator;
}
public CodeAnalyzer(CompilerContext context) {
context.put(CODE_ANALYZER_KEY, this);
this.symTable = SymbolTable.getInstance(context);
this.types = Types.getInstance(context);
this.dlog = BLangDiagnosticLog.getInstance(context);
this.typeChecker = TypeChecker.getInstance(context);
this.names = Names.getInstance(context);
this.symResolver = SymbolResolver.getInstance(context);
this.reachabilityAnalyzer = ReachabilityAnalyzer.getInstance(context);
}
public BLangPackage analyze(BLangPackage pkgNode) {
final AnalyzerData data = new AnalyzerData();
visitNode(pkgNode, data);
return pkgNode;
}
@Override
public void visit(BLangPackage pkgNode, AnalyzerData data) {
this.dlog.setCurrentPackageId(pkgNode.packageID);
if (pkgNode.completedPhases.contains(CompilerPhase.CODE_ANALYZE)) {
return;
}
data.parent = pkgNode;
data.env = this.symTable.pkgEnvMap.get(pkgNode.symbol);
analyzeTopLevelNodes(pkgNode, data);
pkgNode.getTestablePkgs().forEach(testablePackage -> visitNode(testablePackage, data));
}
@Override
public void visit(BLangTestablePackage node, AnalyzerData data) {
visit((BLangPackage) node, data);
}
private void analyzeTopLevelNodes(BLangPackage pkgNode, AnalyzerData data) {
List<TopLevelNode> topLevelNodes = pkgNode.topLevelNodes;
for (int i = 0; i < topLevelNodes.size(); i++) {
analyzeNode((BLangNode) topLevelNodes.get(i), data);
}
pkgNode.completedPhases.add(CompilerPhase.CODE_ANALYZE);
}
@Override
public void analyzeNode(BLangNode node, AnalyzerData data) {
SymbolEnv prevEnv = data.env;
BLangNode parent = data.parent;
node.parent = parent;
data.parent = node;
visitNode(node, data);
data.parent = parent;
data.env = prevEnv;
}
private void analyzeTypeNode(BLangType node, AnalyzerData data) {
if (node == null) {
return;
}
analyzeNode(node, data);
}
@Override
public void visit(BLangCompilationUnit compUnitNode, AnalyzerData data) {
compUnitNode.topLevelNodes.forEach(e -> analyzeNode((BLangNode) e, data));
}
@Override
public void visit(BLangTypeDefinition typeDefinition, AnalyzerData data) {
analyzeTypeNode(typeDefinition.typeNode, data);
typeDefinition.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, data));
}
@Override
public void visit(BLangClassDefinition classDefinition, AnalyzerData data) {
data.env = SymbolEnv.createClassEnv(classDefinition, classDefinition.symbol.scope, data.env);
for (BLangSimpleVariable field : classDefinition.fields) {
DefaultValueState prevDefaultValueState = data.defaultValueState;
data.defaultValueState = DefaultValueState.OBJECT_FIELD_INITIALIZER;
analyzeNode(field, data);
data.defaultValueState = prevDefaultValueState;
}
List<BLangFunction> bLangFunctionList = new ArrayList<>(classDefinition.functions);
if (classDefinition.initFunction != null) {
bLangFunctionList.add(classDefinition.initFunction);
}
bLangFunctionList.sort(Comparator.comparingInt(function -> function.pos.lineRange().startLine().line()));
for (BLangFunction function : bLangFunctionList) {
analyzeNode(function, data);
}
classDefinition.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, data));
}
@Override
public void visit(BLangObjectConstructorExpression objectConstructorExpression, AnalyzerData data) {
visit(objectConstructorExpression.typeInit, data);
}
@Override
public void visit(BLangTupleVariableDef bLangTupleVariableDef, AnalyzerData data) {
analyzeNode(bLangTupleVariableDef.var, data);
}
@Override
public void visit(BLangRecordVariableDef bLangRecordVariableDef, AnalyzerData data) {
analyzeNode(bLangRecordVariableDef.var, data);
}
@Override
public void visit(BLangErrorVariableDef bLangErrorVariableDef, AnalyzerData data) {
analyzeNode(bLangErrorVariableDef.errorVariable, data);
}
@Override
public void visit(BLangResourceFunction funcNode, AnalyzerData data) {
visit((BLangFunction) funcNode, data);
}
@Override
public void visit(BLangFunction funcNode, AnalyzerData data) {
validateParams(funcNode, data);
analyzeNode(funcNode.returnTypeNode, data);
boolean isLambda = funcNode.flagSet.contains(Flag.LAMBDA);
if (isLambda) {
return;
}
if (Symbols.isPublic(funcNode.symbol)) {
funcNode.symbol.params.forEach(symbol -> analyzeExportableTypeRef(funcNode.symbol, symbol.type.tsymbol,
true,
funcNode.pos));
if (funcNode.symbol.restParam != null) {
analyzeExportableTypeRef(funcNode.symbol, funcNode.symbol.restParam.type.tsymbol, true,
funcNode.restParam.pos);
}
analyzeExportableTypeRef(funcNode.symbol, funcNode.symbol.retType.tsymbol, true,
funcNode.returnTypeNode.pos);
}
if (MAIN_FUNCTION_NAME.equals(funcNode.name.value)) {
new MainFunctionValidator(types, dlog).validateMainFunction(funcNode);
}
this.validateModuleInitFunction(funcNode);
try {
this.initNewWorkerActionSystem(data);
data.workerActionSystemStack.peek().startWorkerActionStateMachine(DEFAULT_WORKER_NAME,
funcNode.pos,
funcNode);
this.visitFunction(funcNode, data);
data.workerActionSystemStack.peek().endWorkerActionStateMachine();
} finally {
this.finalizeCurrentWorkerActionSystem(data);
}
funcNode.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, data));
validateNamedWorkerUniqueReferences(data);
}
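// A named worker may be used as a variable reference at most once; when it is referenced more
// than once, every such reference is reported as an error.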
private void validateNamedWorkerUniqueReferences(AnalyzerData data) {
for (var nodes : data.workerReferences.values()) {
if (nodes.size() > 1) {
for (BLangNode node: nodes) {
dlog.error(node.pos, DiagnosticErrorCode.ILLEGAL_WORKER_REFERENCE_AS_A_VARIABLE_REFERENCE, node);
}
}
}
data.workerReferences.clear();
}
private void validateParams(BLangFunction funcNode, AnalyzerData data) {
for (BLangSimpleVariable parameter : funcNode.requiredParams) {
analyzeNode(parameter, data);
}
if (funcNode.restParam != null) {
analyzeNode(funcNode.restParam, data);
}
}
private void visitFunction(BLangFunction funcNode, AnalyzerData data) {
data.env = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, data.env);
data.returnWithinTransactionCheckStack.push(true);
data.returnTypes.push(new LinkedHashSet<>());
data.transactionalFuncCheckStack.push(funcNode.flagSet.contains(Flag.TRANSACTIONAL));
if (Symbols.isNative(funcNode.symbol)) {
return;
}
if (isPublicInvokableNode(funcNode)) {
analyzeNode(funcNode.returnTypeNode, data);
}
/* the body can be null in the case of Object type function declarations */
if (funcNode.body != null) {
DefaultValueState prevDefaultValueState = data.defaultValueState;
if (prevDefaultValueState == DefaultValueState.RECORD_FIELD_DEFAULT ||
prevDefaultValueState == DefaultValueState.OBJECT_FIELD_INITIALIZER) {
data.defaultValueState = DefaultValueState.FUNCTION_IN_DEFAULT_VALUE;
}
analyzeNode(funcNode.body, data);
data.defaultValueState = prevDefaultValueState;
}
reachabilityAnalyzer.analyzeReachability(funcNode, data.env);
data.returnTypes.pop();
data.returnWithinTransactionCheckStack.pop();
data.transactionalFuncCheckStack.pop();
}
private boolean isPublicInvokableNode(BLangInvokableNode invNode) {
return Symbols.isPublic(invNode.symbol) && (SymbolKind.PACKAGE.equals(invNode.symbol.owner.getKind()) ||
Symbols.isPublic(invNode.symbol.owner));
}
@Override
public void visit(BLangBlockFunctionBody body, AnalyzerData data) {
boolean prevWithinTxScope = data.withinTransactionScope;
boolean prevLoopAlterNotAllowed = data.loopAlterNotAllowed;
data.loopAlterNotAllowed = data.loopCount > 0;
if (!prevWithinTxScope) {
data.withinTransactionScope = data.transactionalFuncCheckStack.peek();
}
data.env = SymbolEnv.createFuncBodyEnv(body, data.env);
for (BLangStatement e : body.stmts) {
data.inInternallyDefinedBlockStmt = true;
analyzeNode(e, data);
}
data.inInternallyDefinedBlockStmt = false;
if (data.transactionalFuncCheckStack.peek()) {
data.withinTransactionScope = prevWithinTxScope;
}
data.loopAlterNotAllowed = prevLoopAlterNotAllowed;
}
@Override
public void visit(BLangExprFunctionBody body, AnalyzerData data) {
analyzeExpr(body.expr, data);
}
@Override
public void visit(BLangExternalFunctionBody body, AnalyzerData data) {
}
@Override
public void visit(BLangForkJoin forkJoin, AnalyzerData data) {
if (forkJoin.workers.isEmpty()) {
dlog.error(forkJoin.pos, DiagnosticErrorCode.INVALID_FOR_JOIN_SYNTAX_EMPTY_FORK);
}
}
@Override
public void visit(BLangTransaction transactionNode, AnalyzerData data) {
if (data.transactionalFuncCheckStack.peek()) {
this.dlog.error(transactionNode.pos,
DiagnosticErrorCode.TRANSACTION_CANNOT_BE_USED_WITHIN_TRANSACTIONAL_SCOPE);
return;
}
data.errorTypes.push(new LinkedHashSet<>());
boolean previousWithinTxScope = data.withinTransactionScope;
int previousCommitCount = data.commitCount;
int previousRollbackCount = data.rollbackCount;
boolean prevCommitRollbackAllowed = data.commitRollbackAllowed;
data.commitRollbackAllowed = true;
data.commitCount = 0;
data.rollbackCount = 0;
data.withinTransactionScope = true;
data.loopWithinTransactionCheckStack.push(false);
data.returnWithinTransactionCheckStack.push(false);
data.transactionCount++;
boolean failureHandled = data.failureHandled;
if (!failureHandled) {
data.failureHandled = transactionNode.onFailClause != null;
}
analyzeNode(transactionNode.transactionBody, data);
data.failureHandled = failureHandled;
if (data.commitCount < 1) {
this.dlog.error(transactionNode.pos, DiagnosticErrorCode.INVALID_COMMIT_COUNT);
}
data.transactionCount--;
data.withinTransactionScope = previousWithinTxScope;
data.commitCount = previousCommitCount;
data.rollbackCount = previousRollbackCount;
data.commitRollbackAllowed = prevCommitRollbackAllowed;
data.returnWithinTransactionCheckStack.pop();
data.loopWithinTransactionCheckStack.pop();
analyzeOnFailClause(transactionNode.onFailClause, data);
data.errorTypes.pop();
}
private void analyzeOnFailClause(BLangOnFailClause onFailClause, AnalyzerData data) {
if (onFailClause != null) {
analyzeNode(onFailClause, data);
}
}
@Override
public void visit(BLangTransactionalExpr transactionalExpr, AnalyzerData data) {
}
@Override
public void visit(BLangCommitExpr commitExpr, AnalyzerData data) {
data.commitCount++;
data.commitCountWithinBlock++;
if (data.transactionCount == 0) {
this.dlog.error(commitExpr.pos, DiagnosticErrorCode.COMMIT_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK);
return;
}
if (data.transactionalFuncCheckStack.peek()) {
this.dlog.error(commitExpr.pos, DiagnosticErrorCode.COMMIT_CANNOT_BE_WITHIN_TRANSACTIONAL_FUNCTION);
return;
}
if (!data.withinTransactionScope || !data.commitRollbackAllowed ||
data.loopWithinTransactionCheckStack.peek()) {
this.dlog.error(commitExpr.pos, DiagnosticErrorCode.COMMIT_NOT_ALLOWED);
return;
}
data.withinTransactionScope = false;
}
@Override
public void visit(BLangRollback rollbackNode, AnalyzerData data) {
data.rollbackCount++;
data.rollbackCountWithinBlock++;
if (data.transactionCount == 0 && !data.withinTransactionScope) {
this.dlog.error(rollbackNode.pos, DiagnosticErrorCode.ROLLBACK_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK);
return;
}
if (!data.transactionalFuncCheckStack.empty() && data.transactionalFuncCheckStack.peek()) {
this.dlog.error(rollbackNode.pos, DiagnosticErrorCode.ROLLBACK_CANNOT_BE_WITHIN_TRANSACTIONAL_FUNCTION);
return;
}
if (!data.withinTransactionScope || !data.commitRollbackAllowed ||
(!data.loopWithinTransactionCheckStack.empty() && data.loopWithinTransactionCheckStack.peek())) {
this.dlog.error(rollbackNode.pos, DiagnosticErrorCode.ROLLBACK_NOT_ALLOWED);
return;
}
data.withinTransactionScope = false;
analyzeExpr(rollbackNode.expr, data);
}
@Override
public void visit(BLangRetry retryNode, AnalyzerData data) {
data.errorTypes.push(new LinkedHashSet<>());
boolean failureHandled = data.failureHandled;
if (!failureHandled) {
data.failureHandled = retryNode.onFailClause != null;
}
visitNode(retryNode.retrySpec, data);
visitNode(retryNode.retryBody, data);
data.failureHandled = failureHandled;
retryNode.retryBody.failureBreakMode = retryNode.onFailClause != null ?
BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK : BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
analyzeOnFailClause(retryNode.onFailClause, data);
data.errorTypes.pop();
}
@Override
public void visit(BLangRetrySpec retrySpec, AnalyzerData data) {
if (retrySpec.retryManagerType != null) {
BSymbol retryManagerTypeSymbol = symTable.langErrorModuleSymbol.scope
.lookup(names.fromString("RetryManager")).symbol;
BType abstractRetryManagerType = retryManagerTypeSymbol.type;
if (!types.isAssignable(retrySpec.retryManagerType.getBType(), abstractRetryManagerType)) {
dlog.error(retrySpec.pos, DiagnosticErrorCode.INVALID_INTERFACE_ON_NON_ABSTRACT_OBJECT,
RETRY_MANAGER_OBJECT_SHOULD_RETRY_FUNC, retrySpec.retryManagerType.getBType());
}
}
}
@Override
public void visit(BLangRetryTransaction retryTransaction, AnalyzerData data) {
analyzeNode(retryTransaction.retrySpec, data);
analyzeNode(retryTransaction.transaction, data);
}
@Override
public void visit(BLangBlockStmt blockNode, AnalyzerData data) {
int prevCommitCount = data.commitCountWithinBlock;
int prevRollbackCount = data.rollbackCountWithinBlock;
data.commitCountWithinBlock = 0;
data.rollbackCountWithinBlock = 0;
boolean inInternallyDefinedBlockStmt = data.inInternallyDefinedBlockStmt;
data.inInternallyDefinedBlockStmt = checkBlockIsAnInternalBlockInImmediateFunctionBody(blockNode);
data.env = SymbolEnv.createBlockEnv(blockNode, data.env);
blockNode.stmts.forEach(e -> analyzeNode(e, data));
data.inInternallyDefinedBlockStmt = inInternallyDefinedBlockStmt;
if (data.commitCountWithinBlock > 1 || data.rollbackCountWithinBlock > 1) {
this.dlog.error(blockNode.pos, DiagnosticErrorCode.MAX_ONE_COMMIT_ROLLBACK_ALLOWED_WITHIN_A_BRANCH);
}
data.commitCountWithinBlock = prevCommitCount;
data.rollbackCountWithinBlock = prevRollbackCount;
}
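// Returns true if the given block is nested, through block statements only, inside a function body block.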
private boolean checkBlockIsAnInternalBlockInImmediateFunctionBody(BLangNode node) {
BLangNode parent = node.parent;
while (parent != null) {
final NodeKind kind = parent.getKind();
if (kind == NodeKind.BLOCK_FUNCTION_BODY) {
return true;
}
if (kind == NodeKind.BLOCK) {
parent = parent.parent;
} else {
return false;
}
}
return false;
}
@Override
public void visit(BLangReturn returnStmt, AnalyzerData data) {
if (checkReturnValidityInTransaction(data)) {
this.dlog.error(returnStmt.pos, DiagnosticErrorCode.RETURN_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
return;
}
analyzeExpr(returnStmt.expr, data);
data.returnTypes.peek().add(returnStmt.expr.getBType());
}
@Override
public void visit(BLangIf ifStmt, AnalyzerData data) {
boolean independentBlocks = false;
int prevCommitCount = data.commitCount;
int prevRollbackCount = data.rollbackCount;
BLangStatement elseStmt = ifStmt.elseStmt;
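// Within a transaction, an if statement with a non-if else branch has mutually exclusive branches,
// so each branch is allowed to contain its own commit/rollback.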
if (data.withinTransactionScope && elseStmt != null && elseStmt.getKind() != NodeKind.IF) {
independentBlocks = true;
data.commitRollbackAllowed = true;
}
boolean prevTxMode = data.withinTransactionScope;
if ((ifStmt.expr.getKind() == NodeKind.GROUP_EXPR ?
((BLangGroupExpr) ifStmt.expr).expression.getKind() :
ifStmt.expr.getKind()) == NodeKind.TRANSACTIONAL_EXPRESSION) {
data.withinTransactionScope = true;
}
BLangBlockStmt body = ifStmt.body;
analyzeNode(body, data);
if (ifStmt.expr.getKind() == NodeKind.TRANSACTIONAL_EXPRESSION) {
data.withinTransactionScope = prevTxMode;
}
if (elseStmt != null) {
if (independentBlocks) {
data.commitRollbackAllowed = true;
data.withinTransactionScope = true;
}
analyzeNode(elseStmt, data);
if ((prevCommitCount != data.commitCount) || prevRollbackCount != data.rollbackCount) {
data.commitRollbackAllowed = false;
}
}
analyzeExpr(ifStmt.expr, data);
}
@Override
public void visit(BLangMatchStatement matchStatement, AnalyzerData data) {
data.errorTypes.push(new LinkedHashSet<>());
analyzeExpr(matchStatement.expr, data);
boolean failureHandled = data.failureHandled;
if (!failureHandled) {
data.failureHandled = matchStatement.onFailClause != null;
}
List<BLangMatchClause> matchClauses = matchStatement.matchClauses;
int clausesSize = matchClauses.size();
for (int i = 0; i < clausesSize; i++) {
BLangMatchClause firstClause = matchClauses.get(i);
for (int j = i + 1; j < clausesSize; j++) {
BLangMatchClause secondClause = matchClauses.get(j);
if (!checkSimilarMatchGuard(firstClause.matchGuard, secondClause.matchGuard)) {
if (firstClause.matchGuard == null) {
checkSimilarMatchPatternsBetweenClauses(firstClause, secondClause);
}
continue;
}
checkSimilarMatchPatternsBetweenClauses(firstClause, secondClause);
}
analyzeNode(firstClause, data);
}
data.failureHandled = failureHandled;
analyzeOnFailClause(matchStatement.onFailClause, data);
data.errorTypes.pop();
}
@Override
public void visit(BLangMatchClause matchClause, AnalyzerData data) {
Map<String, BVarSymbol> variablesInMatchPattern = new HashMap<>();
boolean patternListContainsSameVars = true;
List<BLangMatchPattern> matchPatterns = matchClause.matchPatterns;
BLangMatchGuard matchGuard = matchClause.matchGuard;
for (int i = 0; i < matchPatterns.size(); i++) {
BLangMatchPattern matchPattern = matchPatterns.get(i);
if (matchPattern.getBType() == symTable.noType) {
dlog.warning(matchPattern.pos, DiagnosticWarningCode.MATCH_STMT_UNMATCHED_PATTERN);
}
if (patternListContainsSameVars) {
patternListContainsSameVars = compareVariables(variablesInMatchPattern, matchPattern);
}
for (int j = i - 1; j >= 0; j--) {
if (checkSimilarMatchPatterns(matchPatterns.get(j), matchPattern)) {
dlog.warning(matchPattern.pos, DiagnosticWarningCode.MATCH_STMT_PATTERN_UNREACHABLE);
}
}
analyzeNode(matchPattern, data);
}
if (matchGuard != null) {
analyzeNode(matchGuard, data);
}
if (!patternListContainsSameVars) {
dlog.error(matchClause.pos, DiagnosticErrorCode.MATCH_PATTERNS_SHOULD_CONTAIN_SAME_SET_OF_VARIABLES);
}
analyzeNode(matchClause.blockStmt, data);
}
@Override
public void visit(BLangMappingMatchPattern mappingMatchPattern, AnalyzerData data) {
}
@Override
public void visit(BLangFieldMatchPattern fieldMatchPattern, AnalyzerData data) {
}
@Override
public void visit(BLangMatchGuard matchGuard, AnalyzerData data) {
analyzeExpr(matchGuard.expr, data);
}
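// Warns about patterns in the second clause that are unreachable because a similar pattern
// already exists in the first clause.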
private void checkSimilarMatchPatternsBetweenClauses(BLangMatchClause firstClause, BLangMatchClause secondClause) {
for (BLangMatchPattern firstMatchPattern : firstClause.matchPatterns) {
for (BLangMatchPattern secondMatchPattern : secondClause.matchPatterns) {
if (checkSimilarMatchPatterns(firstMatchPattern, secondMatchPattern)) {
dlog.warning(secondMatchPattern.pos, DiagnosticWarningCode.MATCH_STMT_PATTERN_UNREACHABLE);
}
}
}
}
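// Checks whether two match patterns are similar, i.e., the second pattern would be unreachable
// when it appears after the first.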
private boolean checkSimilarMatchPatterns(BLangMatchPattern firstPattern, BLangMatchPattern secondPattern) {
NodeKind firstPatternKind = firstPattern.getKind();
NodeKind secondPatternKind = secondPattern.getKind();
if (firstPatternKind != secondPatternKind) {
if (firstPatternKind == NodeKind.VAR_BINDING_PATTERN_MATCH_PATTERN) {
return checkEmptyListOrMapMatchWithVarBindingPatternMatch(secondPattern,
((BLangVarBindingPatternMatchPattern) firstPattern));
}
if (secondPatternKind == NodeKind.VAR_BINDING_PATTERN_MATCH_PATTERN) {
return checkEmptyListOrMapMatchWithVarBindingPatternMatch(firstPattern,
((BLangVarBindingPatternMatchPattern) secondPattern));
}
return false;
}
switch (firstPatternKind) {
case WILDCARD_MATCH_PATTERN:
case REST_MATCH_PATTERN:
return true;
case CONST_MATCH_PATTERN:
return checkSimilarConstMatchPattern((BLangConstPattern) firstPattern,
(BLangConstPattern) secondPattern);
case VAR_BINDING_PATTERN_MATCH_PATTERN:
return checkSimilarBindingPatterns(
((BLangVarBindingPatternMatchPattern) firstPattern).getBindingPattern(),
((BLangVarBindingPatternMatchPattern) secondPattern).getBindingPattern());
case LIST_MATCH_PATTERN:
return checkSimilarListMatchPattern((BLangListMatchPattern) firstPattern,
(BLangListMatchPattern) secondPattern);
case MAPPING_MATCH_PATTERN:
return checkSimilarMappingMatchPattern((BLangMappingMatchPattern) firstPattern,
(BLangMappingMatchPattern) secondPattern);
case ERROR_MATCH_PATTERN:
return checkSimilarErrorMatchPattern((BLangErrorMatchPattern) firstPattern,
(BLangErrorMatchPattern) secondPattern);
default:
return false;
}
}
private boolean checkEmptyListOrMapMatchWithVarBindingPatternMatch(BLangMatchPattern firstPattern,
BLangVarBindingPatternMatchPattern secondPattern) {
if (firstPattern.getKind() == NodeKind.LIST_MATCH_PATTERN) {
BLangBindingPattern bindingPattern = secondPattern.getBindingPattern();
if (bindingPattern.getKind() != NodeKind.LIST_BINDING_PATTERN) {
return false;
}
BLangListMatchPattern listMatchPattern = (BLangListMatchPattern) firstPattern;
BLangListBindingPattern listBindingPattern = (BLangListBindingPattern) bindingPattern;
return listMatchPattern.matchPatterns.isEmpty() && listBindingPattern.bindingPatterns.isEmpty() &&
listMatchPattern.restMatchPattern == null && listBindingPattern.restBindingPattern == null;
}
if (firstPattern.getKind() == NodeKind.MAPPING_MATCH_PATTERN) {
BLangBindingPattern bindingPattern = secondPattern.getBindingPattern();
if (secondPattern.getBindingPattern().getKind() != NodeKind.MAPPING_BINDING_PATTERN) {
return false;
}
BLangMappingMatchPattern mappingMatchPattern = (BLangMappingMatchPattern) firstPattern;
BLangMappingBindingPattern mappingBindingPattern = (BLangMappingBindingPattern) bindingPattern;
return mappingMatchPattern.fieldMatchPatterns.isEmpty() &&
mappingBindingPattern.fieldBindingPatterns.isEmpty() &&
mappingMatchPattern.restMatchPattern == null && mappingBindingPattern.restBindingPattern == null;
}
return false;
}
private boolean checkSimilarErrorMatchPattern(BLangErrorMatchPattern firstErrorMatchPattern,
BLangErrorMatchPattern secondErrorMatchPattern) {
if (firstErrorMatchPattern == null || secondErrorMatchPattern == null) {
return false;
}
if (!checkSimilarErrorTypeReference(firstErrorMatchPattern.errorTypeReference,
secondErrorMatchPattern.errorTypeReference)) {
return false;
}
if (!checkSimilarErrorMessagePattern(firstErrorMatchPattern.errorMessageMatchPattern,
secondErrorMatchPattern.errorMessageMatchPattern)) {
return false;
}
if (!checkSimilarErrorCauseMatchPattern(firstErrorMatchPattern.errorCauseMatchPattern,
secondErrorMatchPattern.errorCauseMatchPattern)) {
return false;
}
return checkSimilarErrorFieldMatchPatterns(firstErrorMatchPattern.errorFieldMatchPatterns,
secondErrorMatchPattern.errorFieldMatchPatterns);
}
private boolean checkSimilarErrorTypeReference(BLangUserDefinedType firstErrorTypeRef,
BLangUserDefinedType secondErrorTypeRef) {
if (firstErrorTypeRef != null && secondErrorTypeRef != null) {
return firstErrorTypeRef.typeName.value.equals(secondErrorTypeRef.typeName.value);
}
return firstErrorTypeRef == null && secondErrorTypeRef == null;
}
private boolean checkSimilarErrorMessagePattern(BLangErrorMessageMatchPattern firstErrorMsgMatchPattern,
BLangErrorMessageMatchPattern secondErrorMsgMatchPattern) {
if (firstErrorMsgMatchPattern != null && secondErrorMsgMatchPattern != null) {
return checkSimilarSimpleMatchPattern(firstErrorMsgMatchPattern.simpleMatchPattern,
secondErrorMsgMatchPattern.simpleMatchPattern);
}
return firstErrorMsgMatchPattern == null && secondErrorMsgMatchPattern == null;
}
private boolean checkSimilarSimpleMatchPattern(BLangSimpleMatchPattern firstSimpleMatchPattern,
BLangSimpleMatchPattern secondSimpleMatchPattern) {
if (firstSimpleMatchPattern != null && secondSimpleMatchPattern != null) {
if (firstSimpleMatchPattern.varVariableName != null) {
return true;
}
BLangConstPattern firstConstPattern = firstSimpleMatchPattern.constPattern;
BLangConstPattern secondConstPattern = secondSimpleMatchPattern.constPattern;
if (firstConstPattern != null) {
if (secondConstPattern != null) {
return checkSimilarConstMatchPattern(firstConstPattern, secondConstPattern);
}
return false;
}
return secondSimpleMatchPattern.varVariableName == null;
}
return firstSimpleMatchPattern == null && secondSimpleMatchPattern == null;
}
private boolean checkSimilarErrorCauseMatchPattern(BLangErrorCauseMatchPattern firstErrorCauseMatchPattern,
BLangErrorCauseMatchPattern secondErrorCauseMatchPattern) {
if (firstErrorCauseMatchPattern != null && secondErrorCauseMatchPattern != null) {
if (!checkSimilarSimpleMatchPattern(firstErrorCauseMatchPattern.simpleMatchPattern,
secondErrorCauseMatchPattern.simpleMatchPattern)) {
return false;
}
return checkSimilarErrorMatchPattern(firstErrorCauseMatchPattern.errorMatchPattern,
secondErrorCauseMatchPattern.errorMatchPattern);
}
return firstErrorCauseMatchPattern == null && secondErrorCauseMatchPattern == null;
}
private boolean checkSimilarErrorFieldMatchPatterns(BLangErrorFieldMatchPatterns firstErrorFieldMatchPatterns,
BLangErrorFieldMatchPatterns secondErrorFieldMatchPatterns) {
if (firstErrorFieldMatchPatterns == null) {
return true;
}
List<BLangNamedArgMatchPattern> firstNamedArgPatterns = firstErrorFieldMatchPatterns.namedArgMatchPatterns;
int firstNamedArgPatternsSize = firstNamedArgPatterns.size();
if (firstNamedArgPatternsSize == 0) {
return true;
}
if (secondErrorFieldMatchPatterns == null) {
return false;
}
List<BLangNamedArgMatchPattern> secondNamedArgPatterns = secondErrorFieldMatchPatterns.namedArgMatchPatterns;
if (firstNamedArgPatternsSize > secondNamedArgPatterns.size()) {
return false;
}
for (int i = 0; i < firstNamedArgPatternsSize; i++) {
if (!checkSimilarNamedArgMatchPatterns(firstNamedArgPatterns.get(i), secondNamedArgPatterns.get(i))) {
return false;
}
}
return true;
}
private boolean checkSimilarNamedArgMatchPatterns(BLangNamedArgMatchPattern firstNamedArgMatchPattern,
BLangNamedArgMatchPattern secondNamedArgMatchPattern) {
if (firstNamedArgMatchPattern.argName.value.equals(secondNamedArgMatchPattern.argName.value)) {
return checkSimilarMatchPatterns(firstNamedArgMatchPattern.matchPattern,
secondNamedArgMatchPattern.matchPattern);
}
return false;
}
private boolean checkSimilarConstMatchPattern(BLangConstPattern firstConstMatchPattern,
BLangConstPattern secondConstMatchPattern) {
Object firstConstValue = getConstValue(firstConstMatchPattern).keySet().iterator().next();
Object secondConstValue = getConstValue(secondConstMatchPattern).keySet().iterator().next();
BType firstConstType = getConstValue(firstConstMatchPattern).values().iterator().next();
BType secondConstType = getConstValue(secondConstMatchPattern).values().iterator().next();
if (firstConstValue == null || secondConstValue == null) {
return false;
}
if (firstConstValue.equals(secondConstValue)) {
return true;
}
if (firstConstType != null && Types.getReferredType(firstConstType).tag == TypeTags.FINITE) {
firstConstValue = getConstValueFromFiniteType(((BFiniteType) firstConstType));
}
if (secondConstType != null && Types.getReferredType(secondConstType).tag == TypeTags.FINITE) {
secondConstValue = getConstValueFromFiniteType(((BFiniteType) secondConstType));
}
if (firstConstValue == null || secondConstValue == null) {
return false;
}
return firstConstValue.equals(secondConstValue);
}
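// Returns a single-entry map from the constant pattern's value to its type; the type is non-null
// only for simple variable (constant) references.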
private HashMap<Object, BType> getConstValue(BLangConstPattern constPattern) {
HashMap<Object, BType> constValAndType = new HashMap<>();
switch (constPattern.expr.getKind()) {
case NUMERIC_LITERAL:
constValAndType.put(((BLangNumericLiteral) constPattern.expr).value, null);
break;
case LITERAL:
constValAndType.put(((BLangLiteral) constPattern.expr).value, null);
break;
case SIMPLE_VARIABLE_REF:
constValAndType.put(((BLangSimpleVarRef) constPattern.expr).variableName, constPattern.getBType());
break;
case UNARY_EXPR:
BLangNumericLiteral newNumericLiteral = Types.constructNumericLiteralFromUnaryExpr(
(BLangUnaryExpr) constPattern.expr);
constValAndType.put(newNumericLiteral.value, null);
}
return constValAndType;
}
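// Extracts the value of a finite type when its value space is a single literal; returns null otherwise.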
private Object getConstValueFromFiniteType(BFiniteType type) {
if (type.getValueSpace().size() == 1) {
BLangExpression expr = type.getValueSpace().iterator().next();
switch (expr.getKind()) {
case NUMERIC_LITERAL:
return ((BLangNumericLiteral) expr).value;
case LITERAL:
return ((BLangLiteral) expr).value;
}
}
return null;
}
private boolean checkSimilarListMatchPattern(BLangListMatchPattern firstListMatchPattern,
BLangListMatchPattern secondListMatchPattern) {
List<BLangMatchPattern> firstMatchPatterns = firstListMatchPattern.matchPatterns;
List<BLangMatchPattern> secondMatchPatterns = secondListMatchPattern.matchPatterns;
int firstPatternsSize = firstMatchPatterns.size();
int secondPatternsSize = secondMatchPatterns.size();
if (firstPatternsSize <= secondPatternsSize) {
for (int i = 0; i < firstPatternsSize; i++) {
if (!checkSimilarMatchPatterns(firstMatchPatterns.get(i), secondMatchPatterns.get(i))) {
return false;
}
}
if (firstPatternsSize == secondPatternsSize) {
if (firstListMatchPattern.restMatchPattern != null) {
return true;
}
return secondListMatchPattern.restMatchPattern == null;
}
return firstListMatchPattern.restMatchPattern != null;
}
return false;
}
private boolean checkSimilarMappingMatchPattern(BLangMappingMatchPattern firstMappingMatchPattern,
BLangMappingMatchPattern secondMappingMatchPattern) {
List<BLangFieldMatchPattern> firstFieldMatchPatterns = firstMappingMatchPattern.fieldMatchPatterns;
List<BLangFieldMatchPattern> secondFieldMatchPatterns = secondMappingMatchPattern.fieldMatchPatterns;
return checkSimilarFieldMatchPatterns(firstFieldMatchPatterns, secondFieldMatchPatterns);
}
private boolean checkSimilarFieldMatchPatterns(List<BLangFieldMatchPattern> firstFieldMatchPatterns,
List<BLangFieldMatchPattern> secondFieldMatchPatterns) {
for (BLangFieldMatchPattern firstFieldMatchPattern : firstFieldMatchPatterns) {
boolean isSamePattern = false;
for (BLangFieldMatchPattern secondFieldMatchPattern : secondFieldMatchPatterns) {
if (checkSimilarFieldMatchPattern(firstFieldMatchPattern, secondFieldMatchPattern)) {
isSamePattern = true;
break;
}
}
if (!isSamePattern) {
return false;
}
}
return true;
}
private boolean checkSimilarFieldMatchPattern(BLangFieldMatchPattern firstFieldMatchPattern,
BLangFieldMatchPattern secondFieldMatchPattern) {
return firstFieldMatchPattern.fieldName.value.equals(secondFieldMatchPattern.fieldName.value) &&
checkSimilarMatchPatterns(firstFieldMatchPattern.matchPattern, secondFieldMatchPattern.matchPattern);
}
private boolean checkSimilarBindingPatterns(BLangBindingPattern firstBindingPattern,
BLangBindingPattern secondBindingPattern) {
NodeKind firstBindingPatternKind = firstBindingPattern.getKind();
NodeKind secondBindingPatternKind = secondBindingPattern.getKind();
if (firstBindingPatternKind != secondBindingPatternKind) {
return false;
}
switch (firstBindingPatternKind) {
case WILDCARD_BINDING_PATTERN:
case REST_BINDING_PATTERN:
case CAPTURE_BINDING_PATTERN:
return true;
case LIST_BINDING_PATTERN:
return checkSimilarListBindingPatterns((BLangListBindingPattern) firstBindingPattern,
(BLangListBindingPattern) secondBindingPattern);
case MAPPING_BINDING_PATTERN:
return checkSimilarMappingBindingPattern((BLangMappingBindingPattern) firstBindingPattern,
(BLangMappingBindingPattern) secondBindingPattern);
case ERROR_BINDING_PATTERN:
return checkSimilarErrorBindingPatterns((BLangErrorBindingPattern) firstBindingPattern,
(BLangErrorBindingPattern) secondBindingPattern);
default:
return false;
}
}
private boolean checkSimilarMappingBindingPattern(BLangMappingBindingPattern firstMappingBindingPattern,
BLangMappingBindingPattern secondMappingBindingPattern) {
List<BLangFieldBindingPattern> firstFieldBindingPatterns = firstMappingBindingPattern.fieldBindingPatterns;
List<BLangFieldBindingPattern> secondFieldBindingPatterns = secondMappingBindingPattern.fieldBindingPatterns;
return checkSimilarFieldBindingPatterns(firstFieldBindingPatterns, secondFieldBindingPatterns);
}
private boolean checkSimilarFieldBindingPatterns(List<BLangFieldBindingPattern> firstFieldBindingPatterns,
List<BLangFieldBindingPattern> secondFieldBindingPatterns) {
for (BLangFieldBindingPattern firstFieldBindingPattern : firstFieldBindingPatterns) {
boolean isSamePattern = false;
for (BLangFieldBindingPattern secondFieldBindingPattern : secondFieldBindingPatterns) {
if (checkSimilarFieldBindingPattern(firstFieldBindingPattern, secondFieldBindingPattern)) {
isSamePattern = true;
break;
}
}
if (!isSamePattern) {
return false;
}
}
return true;
}
private boolean checkSimilarFieldBindingPattern(BLangFieldBindingPattern firstFieldBindingPattern,
BLangFieldBindingPattern secondFieldBindingPattern) {
boolean hasSameFieldNames = firstFieldBindingPattern.fieldName.value.
equals(secondFieldBindingPattern.fieldName.value);
if (firstFieldBindingPattern.bindingPattern.getKind() == secondFieldBindingPattern.bindingPattern.getKind()) {
return hasSameFieldNames && checkSimilarBindingPatterns(firstFieldBindingPattern.bindingPattern,
secondFieldBindingPattern.bindingPattern);
}
return hasSameFieldNames && firstFieldBindingPattern.bindingPattern.getKind() ==
NodeKind.CAPTURE_BINDING_PATTERN;
}
private boolean checkSimilarListBindingPatterns(BLangListBindingPattern firstBindingPattern,
BLangListBindingPattern secondBindingPattern) {
List<BLangBindingPattern> firstPatterns = firstBindingPattern.bindingPatterns;
List<BLangBindingPattern> secondPatterns = secondBindingPattern.bindingPatterns;
int firstPatternsSize = firstPatterns.size();
int secondPatternsSize = secondPatterns.size();
if (firstPatternsSize <= secondPatternsSize) {
for (int i = 0; i < firstPatternsSize; i++) {
if (!checkSimilarBindingPatterns(firstPatterns.get(i), secondPatterns.get(i))) {
return firstPatterns.get(i).getKind() == NodeKind.CAPTURE_BINDING_PATTERN;
}
}
if (firstPatternsSize == secondPatternsSize) {
if (firstBindingPattern.restBindingPattern != null) {
return true;
}
return secondBindingPattern.restBindingPattern == null;
}
return secondBindingPattern.restBindingPattern != null;
}
return false;
}
private boolean checkSimilarErrorBindingPatterns(BLangErrorBindingPattern firstErrorBindingPattern,
BLangErrorBindingPattern secondErrorBindingPattern) {
if (firstErrorBindingPattern == null || secondErrorBindingPattern == null) {
return false;
}
if (!checkSimilarErrorTypeReference(firstErrorBindingPattern.errorTypeReference,
secondErrorBindingPattern.errorTypeReference)) {
return false;
}
if (!checkSimilarErrorMessageBindingPattern(firstErrorBindingPattern.errorMessageBindingPattern,
secondErrorBindingPattern.errorMessageBindingPattern)) {
return false;
}
if (!checkSimilarErrorCauseBindingPattern(firstErrorBindingPattern.errorCauseBindingPattern,
secondErrorBindingPattern.errorCauseBindingPattern)) {
return false;
}
return checkSimilarErrorFieldBindingPatterns(firstErrorBindingPattern.errorFieldBindingPatterns,
secondErrorBindingPattern.errorFieldBindingPatterns);
}
private boolean checkSimilarErrorMessageBindingPattern(BLangErrorMessageBindingPattern firstErrorMsgBindingPattern,
BLangErrorMessageBindingPattern secondErrorMsgBindingPattern) {
if (firstErrorMsgBindingPattern != null && secondErrorMsgBindingPattern != null) {
return checkSimilarSimpleBindingPattern(firstErrorMsgBindingPattern.simpleBindingPattern,
secondErrorMsgBindingPattern.simpleBindingPattern);
}
return firstErrorMsgBindingPattern == null && secondErrorMsgBindingPattern == null;
}
private boolean checkSimilarSimpleBindingPattern(BLangSimpleBindingPattern firstSimpleBindingPattern,
BLangSimpleBindingPattern secondSimpleBindingPattern) {
if (firstSimpleBindingPattern != null && secondSimpleBindingPattern != null) {
BLangBindingPattern firstCaptureBindingPattern = firstSimpleBindingPattern.captureBindingPattern;
BLangBindingPattern secondCaptureBindingPattern = secondSimpleBindingPattern.captureBindingPattern;
if (firstCaptureBindingPattern != null && secondCaptureBindingPattern != null) {
return checkSimilarBindingPatterns(firstCaptureBindingPattern, secondCaptureBindingPattern);
}
return firstSimpleBindingPattern.wildCardBindingPattern != null;
}
return firstSimpleBindingPattern == null && secondSimpleBindingPattern == null;
}
private boolean checkSimilarErrorCauseBindingPattern(BLangErrorCauseBindingPattern firstErrorCauseBindingPattern,
BLangErrorCauseBindingPattern secondErrorCauseBindingPattern) {
if (firstErrorCauseBindingPattern != null && secondErrorCauseBindingPattern != null) {
if (!checkSimilarSimpleBindingPattern(firstErrorCauseBindingPattern.simpleBindingPattern,
secondErrorCauseBindingPattern.simpleBindingPattern)) {
return false;
}
return checkSimilarErrorBindingPatterns(firstErrorCauseBindingPattern.errorBindingPattern,
secondErrorCauseBindingPattern.errorBindingPattern);
}
return firstErrorCauseBindingPattern == null && secondErrorCauseBindingPattern == null;
}
private boolean checkSimilarErrorFieldBindingPatterns(
BLangErrorFieldBindingPatterns firstErrorFieldBindingPatterns,
BLangErrorFieldBindingPatterns secondErrorFieldBindingPatterns) {
if (firstErrorFieldBindingPatterns == null) {
return true;
}
List<BLangNamedArgBindingPattern> firstNamedArgPatterns =
firstErrorFieldBindingPatterns.namedArgBindingPatterns;
int firstNamedArgPatternsSize = firstNamedArgPatterns.size();
if (firstNamedArgPatternsSize == 0) {
return true;
}
if (secondErrorFieldBindingPatterns == null) {
return false;
}
List<BLangNamedArgBindingPattern> secondNamedArgPatterns =
secondErrorFieldBindingPatterns.namedArgBindingPatterns;
if (firstNamedArgPatternsSize > secondNamedArgPatterns.size()) {
return false;
}
for (int i = 0; i < firstNamedArgPatternsSize; i++) {
if (!checkSimilarNamedArgBindingPatterns(firstNamedArgPatterns.get(i), secondNamedArgPatterns.get(i))) {
return false;
}
}
return true;
}
private boolean checkSimilarNamedArgBindingPatterns(BLangNamedArgBindingPattern firstNamedArgBindingPattern,
BLangNamedArgBindingPattern secondNamedArgBindingPattern) {
if (firstNamedArgBindingPattern.argName.value.equals(secondNamedArgBindingPattern.argName.value)) {
return checkSimilarBindingPatterns(firstNamedArgBindingPattern.bindingPattern,
secondNamedArgBindingPattern.bindingPattern);
}
return false;
}
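// Match guards are similar when both are absent, or both are type tests on the same variable
// and the first tested type is assignable to the second.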
private boolean checkSimilarMatchGuard(BLangMatchGuard firstMatchGuard, BLangMatchGuard secondMatchGuard) {
if (firstMatchGuard == null && secondMatchGuard == null) {
return true;
}
if (firstMatchGuard == null || secondMatchGuard == null) {
return false;
}
if (firstMatchGuard.expr.getKind() == NodeKind.TYPE_TEST_EXPR &&
secondMatchGuard.expr.getKind() == NodeKind.TYPE_TEST_EXPR &&
((BLangTypeTestExpr) firstMatchGuard.expr).expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
((BLangTypeTestExpr) secondMatchGuard.expr).expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangTypeTestExpr firstTypeTest = (BLangTypeTestExpr) firstMatchGuard.expr;
BLangTypeTestExpr secondTypeTest = (BLangTypeTestExpr) secondMatchGuard.expr;
return ((BLangSimpleVarRef) firstTypeTest.expr).variableName.toString().equals(
((BLangSimpleVarRef) secondTypeTest.expr).variableName.toString()) &&
types.isAssignable(firstTypeTest.typeNode.getBType(),
secondTypeTest.typeNode.getBType());
}
return false;
}
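// Verifies that the current match pattern declares the same set of variable names as the earlier
// patterns in the clause; the map is seeded from the first pattern.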
private boolean compareVariables(Map<String, BVarSymbol> varsInPreviousMatchPattern,
BLangMatchPattern matchPattern) {
Map<String, BVarSymbol> varsInCurrentMatchPattern = matchPattern.declaredVars;
if (varsInPreviousMatchPattern.size() == 0) {
varsInPreviousMatchPattern.putAll(varsInCurrentMatchPattern);
return true;
}
if (varsInPreviousMatchPattern.size() != varsInCurrentMatchPattern.size()) {
return false;
}
for (String identifier : varsInPreviousMatchPattern.keySet()) {
if (!varsInCurrentMatchPattern.containsKey(identifier)) {
return false;
}
}
return true;
}
@Override
public void visit(BLangWildCardMatchPattern wildCardMatchPattern, AnalyzerData data) {
wildCardMatchPattern.isLastPattern =
wildCardMatchPattern.matchExpr != null && types.isAssignable(wildCardMatchPattern.matchExpr.getBType(),
symTable.anyType);
}
@Override
public void visit(BLangConstPattern constMatchPattern, AnalyzerData data) {
analyzeNode(constMatchPattern.expr, data);
}
@Override
public void visit(BLangVarBindingPatternMatchPattern varBindingPattern, AnalyzerData data) {
BLangBindingPattern bindingPattern = varBindingPattern.getBindingPattern();
analyzeNode(bindingPattern, data);
switch (bindingPattern.getKind()) {
case WILDCARD_BINDING_PATTERN:
varBindingPattern.isLastPattern =
varBindingPattern.matchExpr != null && types.isAssignable(
varBindingPattern.matchExpr.getBType(),
symTable.anyType);
return;
case CAPTURE_BINDING_PATTERN:
varBindingPattern.isLastPattern =
varBindingPattern.matchExpr != null && !varBindingPattern.matchGuardIsAvailable;
return;
case LIST_BINDING_PATTERN:
if (varBindingPattern.matchExpr == null) {
return;
}
varBindingPattern.isLastPattern = types.isSameType(varBindingPattern.matchExpr.getBType(),
varBindingPattern.getBType()) || types.isAssignable(
varBindingPattern.matchExpr.getBType(),
varBindingPattern.getBType());
}
}
@Override
public void visit(BLangMappingBindingPattern mappingBindingPattern, AnalyzerData data) {
}
@Override
public void visit(BLangWildCardBindingPattern wildCardBindingPattern, AnalyzerData data) {
}
@Override
public void visit(BLangListMatchPattern listMatchPattern, AnalyzerData data) {
if (listMatchPattern.matchExpr == null) {
return;
}
listMatchPattern.isLastPattern = types.isAssignable(listMatchPattern.matchExpr.getBType(),
listMatchPattern.getBType()) && !isConstMatchPatternExist(listMatchPattern);
}
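// Returns true if the pattern is a constant pattern, or contains one within its list/mapping members.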
private boolean isConstMatchPatternExist(BLangMatchPattern matchPattern) {
switch (matchPattern.getKind()) {
case CONST_MATCH_PATTERN:
return true;
case LIST_MATCH_PATTERN:
for (BLangMatchPattern memberMatchPattern : ((BLangListMatchPattern) matchPattern).matchPatterns) {
if (isConstMatchPatternExist(memberMatchPattern)) {
return true;
}
}
return false;
case MAPPING_MATCH_PATTERN:
for (BLangFieldMatchPattern fieldMatchPattern :
((BLangMappingMatchPattern) matchPattern).fieldMatchPatterns) {
if (isConstMatchPatternExist(fieldMatchPattern.matchPattern)) {
return true;
}
}
return false;
default:
return false;
}
}
@Override
public void visit(BLangCaptureBindingPattern captureBindingPattern, AnalyzerData data) {
}
@Override
public void visit(BLangListBindingPattern listBindingPattern, AnalyzerData data) {
}
@Override
public void visit(BLangErrorMatchPattern errorMatchPattern, AnalyzerData data) {
}
@Override
public void visit(BLangErrorBindingPattern errorBindingPattern, AnalyzerData data) {
}
@Override
public void visit(BLangForeach foreach, AnalyzerData data) {
data.loopWithinTransactionCheckStack.push(true);
data.errorTypes.push(new LinkedHashSet<>());
boolean failureHandled = data.failureHandled;
if (!data.failureHandled) {
data.failureHandled = foreach.onFailClause != null;
}
data.loopCount++;
BLangBlockStmt body = foreach.body;
data.env = SymbolEnv.createLoopEnv(foreach, data.env);
analyzeNode(body, data);
data.loopCount--;
data.failureHandled = failureHandled;
data.loopWithinTransactionCheckStack.pop();
analyzeExpr(foreach.collection, data);
body.failureBreakMode = foreach.onFailClause != null ?
BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK : BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
analyzeOnFailClause(foreach.onFailClause, data);
data.errorTypes.pop();
}
@Override
public void visit(BLangWhile whileNode, AnalyzerData data) {
data.loopWithinTransactionCheckStack.push(true);
data.errorTypes.push(new LinkedHashSet<>());
boolean failureHandled = data.failureHandled;
if (!data.failureHandled) {
data.failureHandled = whileNode.onFailClause != null;
}
data.loopCount++;
BLangBlockStmt body = whileNode.body;
data.env = SymbolEnv.createLoopEnv(whileNode, data.env);
analyzeNode(body, data);
data.loopCount--;
data.failureHandled = failureHandled;
data.loopWithinTransactionCheckStack.pop();
analyzeExpr(whileNode.expr, data);
analyzeOnFailClause(whileNode.onFailClause, data);
data.errorTypes.pop();
}
@Override
public void visit(BLangDo doNode, AnalyzerData data) {
data.errorTypes.push(new LinkedHashSet<>());
boolean failureHandled = data.failureHandled;
if (!data.failureHandled) {
data.failureHandled = doNode.onFailClause != null;
}
analyzeNode(doNode.body, data);
data.failureHandled = failureHandled;
doNode.body.failureBreakMode = doNode.onFailClause != null ?
BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK : BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
analyzeOnFailClause(doNode.onFailClause, data);
data.errorTypes.pop();
}
@Override
public void visit(BLangFail failNode, AnalyzerData data) {
data.failVisited = true;
analyzeExpr(failNode.expr, data);
if (data.env.scope.owner.getKind() == SymbolKind.PACKAGE) {
return;
}
typeChecker.checkExpr(failNode.expr, data.env);
if (!data.errorTypes.empty()) {
data.errorTypes.peek().add(getErrorTypes(failNode.expr.getBType()));
}
if (!data.failureHandled) {
BType exprType = data.env.enclInvokable.getReturnTypeNode().getBType();
data.returnTypes.peek().add(exprType);
if (!types.isAssignable(getErrorTypes(failNode.expr.getBType()), exprType)) {
dlog.error(failNode.pos, DiagnosticErrorCode.FAIL_EXPR_NO_MATCHING_ERROR_RETURN_IN_ENCL_INVOKABLE);
}
}
}
@Override
public void visit(BLangLock lockNode, AnalyzerData data) {
data.errorTypes.push(new LinkedHashSet<>());
boolean failureHandled = data.failureHandled;
if (!data.failureHandled) {
data.failureHandled = lockNode.onFailClause != null;
}
boolean previousWithinLockBlock = data.withinLockBlock;
data.withinLockBlock = true;
lockNode.body.stmts.forEach(e -> analyzeNode(e, data));
data.withinLockBlock = previousWithinLockBlock;
data.failureHandled = failureHandled;
lockNode.body.failureBreakMode = lockNode.onFailClause != null ?
BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK : BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
analyzeOnFailClause(lockNode.onFailClause, data);
data.errorTypes.pop();
}
@Override
public void visit(BLangContinue continueNode, AnalyzerData data) {
if (data.loopCount == 0) {
this.dlog.error(continueNode.pos, DiagnosticErrorCode.CONTINUE_CANNOT_BE_OUTSIDE_LOOP);
return;
}
if (checkNextBreakValidityInTransaction(data)) {
this.dlog.error(continueNode.pos, DiagnosticErrorCode.CONTINUE_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
return;
}
if (data.loopAlterNotAllowed) {
this.dlog.error(continueNode.pos, DiagnosticErrorCode.CONTINUE_NOT_ALLOWED);
}
}
@Override
public void visit(BLangImportPackage importPkgNode, AnalyzerData data) {
BPackageSymbol pkgSymbol = importPkgNode.symbol;
SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgSymbol);
if (pkgEnv == null) {
return;
}
analyzeNode(pkgEnv.node, data);
}
@Override
public void visit(BLangXMLNS xmlnsNode, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangClientDeclaration node, AnalyzerData data) {
}
@Override
public void visit(BLangService serviceNode, AnalyzerData data) {
}
private void analyzeExportableTypeRef(BSymbol owner, BTypeSymbol symbol, boolean inFuncSignature,
Location pos) {
if (!inFuncSignature && Symbols.isFlagOn(owner.flags, Flags.ANONYMOUS)) {
return;
}
if (Symbols.isPublic(owner)) {
HashSet<BTypeSymbol> visitedSymbols = new HashSet<>();
checkForExportableType(symbol, pos, visitedSymbols);
}
}
private void checkForExportableType(BTypeSymbol symbol, Location pos, HashSet<BTypeSymbol> visitedSymbols) {
if (symbol == null || symbol.type == null || Symbols.isFlagOn(symbol.flags, Flags.TYPE_PARAM)) {
return;
}
if (!visitedSymbols.add(symbol)) {
return;
}
BType symbolType = symbol.type;
switch (symbolType.tag) {
case TypeTags.ARRAY:
checkForExportableType(((BArrayType) symbolType).eType.tsymbol, pos, visitedSymbols);
return;
case TypeTags.TUPLE:
BTupleType tupleType = (BTupleType) symbolType;
tupleType.tupleTypes.forEach(t -> checkForExportableType(t.tsymbol, pos, visitedSymbols));
if (tupleType.restType != null) {
checkForExportableType(tupleType.restType.tsymbol, pos, visitedSymbols);
}
return;
case TypeTags.MAP:
checkForExportableType(((BMapType) symbolType).constraint.tsymbol, pos, visitedSymbols);
return;
case TypeTags.RECORD:
if (Symbols.isFlagOn(symbol.flags, Flags.ANONYMOUS)) {
BRecordType recordType = (BRecordType) symbolType;
recordType.fields.values().forEach(f -> checkForExportableType(f.type.tsymbol, pos,
visitedSymbols));
if (recordType.restFieldType != null) {
checkForExportableType(recordType.restFieldType.tsymbol, pos, visitedSymbols);
}
return;
}
break;
case TypeTags.TABLE:
BTableType tableType = (BTableType) symbolType;
if (tableType.constraint != null) {
checkForExportableType(tableType.constraint.tsymbol, pos, visitedSymbols);
}
return;
case TypeTags.STREAM:
BStreamType streamType = (BStreamType) symbolType;
if (streamType.constraint != null) {
checkForExportableType(streamType.constraint.tsymbol, pos, visitedSymbols);
}
return;
case TypeTags.INVOKABLE:
BInvokableType invokableType = (BInvokableType) symbolType;
if (Symbols.isFlagOn(invokableType.flags, Flags.ANY_FUNCTION)) {
return;
}
if (invokableType.paramTypes != null) {
for (BType paramType : invokableType.paramTypes) {
checkForExportableType(paramType.tsymbol, pos, visitedSymbols);
}
}
if (invokableType.restType != null) {
checkForExportableType(invokableType.restType.tsymbol, pos, visitedSymbols);
}
checkForExportableType(invokableType.retType.tsymbol, pos, visitedSymbols);
return;
case TypeTags.PARAMETERIZED_TYPE:
BTypeSymbol parameterizedType = ((BParameterizedType) symbolType).paramValueType.tsymbol;
checkForExportableType(parameterizedType, pos, visitedSymbols);
return;
case TypeTags.ERROR:
if (Symbols.isFlagOn(symbol.flags, Flags.ANONYMOUS)) {
checkForExportableType((((BErrorType) symbolType).detailType.tsymbol), pos, visitedSymbols);
return;
}
break;
case TypeTags.TYPEREFDESC:
symbolType = Types.getReferredType(symbolType);
checkForExportableType(symbolType.tsymbol, pos, visitedSymbols);
return;
}
if (!Symbols.isPublic(symbol)) {
dlog.warning(pos, DiagnosticWarningCode.ATTEMPT_EXPOSE_NON_PUBLIC_SYMBOL, symbol.name);
}
}
@Override
public void visit(BLangLetExpression letExpression, AnalyzerData data) {
int ownerSymTag = data.env.scope.owner.tag;
if ((ownerSymTag & SymTag.RECORD) == SymTag.RECORD) {
dlog.error(letExpression.pos, DiagnosticErrorCode.LET_EXPRESSION_NOT_YET_SUPPORTED_RECORD_FIELD);
} else if ((ownerSymTag & SymTag.OBJECT) == SymTag.OBJECT) {
dlog.error(letExpression.pos, DiagnosticErrorCode.LET_EXPRESSION_NOT_YET_SUPPORTED_OBJECT_FIELD);
}
data.env = letExpression.env;
for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
analyzeNode((BLangNode) letVariable.definitionNode, data);
}
analyzeExpr(letExpression.expr, data);
}
@Override
public void visit(BLangSimpleVariable varNode, AnalyzerData data) {
analyzeTypeNode(varNode.typeNode, data);
analyzeExpr(varNode.expr, data);
if (Objects.isNull(varNode.symbol)) {
return;
}
if (!Symbols.isPublic(varNode.symbol)) {
return;
}
int ownerSymTag = data.env.scope.owner.tag;
if ((ownerSymTag & SymTag.RECORD) == SymTag.RECORD || (ownerSymTag & SymTag.OBJECT) == SymTag.OBJECT) {
analyzeExportableTypeRef(data.env.scope.owner, varNode.getBType().tsymbol, false, varNode.pos);
} else if ((ownerSymTag & SymTag.INVOKABLE) != SymTag.INVOKABLE) {
analyzeExportableTypeRef(varNode.symbol, varNode.getBType().tsymbol, false, varNode.pos);
}
varNode.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, data));
}
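// An inferred-length array type is valid only on a variable declared at module, function-body, or
// block level whose initializer is a list value (array literal, list constructor, or a group of one).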
private boolean isValidInferredArray(BLangNode node) {
switch (node.getKind()) {
case INTERSECTION_TYPE_NODE:
case UNION_TYPE_NODE:
return isValidInferredArray(node.parent);
case VARIABLE:
BLangSimpleVariable varNode = (BLangSimpleVariable) node;
BLangExpression expr = varNode.expr;
return expr != null && isValidContextForInferredArray(node.parent) &&
isValidVariableForInferredArray(expr);
default:
return false;
}
}
private boolean isValidContextForInferredArray(BLangNode node) {
switch (node.getKind()) {
case PACKAGE:
case EXPR_FUNCTION_BODY:
case BLOCK_FUNCTION_BODY:
case BLOCK:
return true;
case VARIABLE_DEF:
return isValidContextForInferredArray(node.parent);
default:
return false;
}
}
private boolean isValidVariableForInferredArray(BLangNode node) {
switch (node.getKind()) {
case LITERAL:
if (node.getBType().tag == TypeTags.ARRAY) {
return true;
}
break;
case LIST_CONSTRUCTOR_EXPR:
return true;
case GROUP_EXPR:
return isValidVariableForInferredArray(((BLangGroupExpr) node).expression);
}
return false;
}
@Override
public void visit(BLangTupleVariable bLangTupleVariable, AnalyzerData data) {
if (bLangTupleVariable.typeNode != null) {
analyzeNode(bLangTupleVariable.typeNode, data);
}
analyzeExpr(bLangTupleVariable.expr, data);
}
@Override
public void visit(BLangRecordVariable bLangRecordVariable, AnalyzerData data) {
if (bLangRecordVariable.typeNode != null) {
analyzeNode(bLangRecordVariable.typeNode, data);
}
analyzeExpr(bLangRecordVariable.expr, data);
}
@Override
public void visit(BLangErrorVariable bLangErrorVariable, AnalyzerData data) {
if (bLangErrorVariable.typeNode != null) {
analyzeNode(bLangErrorVariable.typeNode, data);
}
analyzeExpr(bLangErrorVariable.expr, data);
}
@Override
public void visit(BLangIdentifier identifierNode, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangAnnotation annotationNode, AnalyzerData data) {
annotationNode.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, data));
}
@Override
public void visit(BLangAnnotationAttachment annAttachmentNode, AnalyzerData data) {
analyzeExpr(annAttachmentNode.expr, data);
BAnnotationSymbol annotationSymbol = annAttachmentNode.annotationSymbol;
if (annotationSymbol != null && Symbols.isFlagOn(annotationSymbol.flags, Flags.DEPRECATED)) {
logDeprecatedWaring(annAttachmentNode.annotationName.toString(), annotationSymbol, annAttachmentNode.pos);
}
}
@Override
public void visit(BLangSimpleVariableDef varDefNode, AnalyzerData data) {
analyzeNode(varDefNode.var, data);
}
@Override
public void visit(BLangCompoundAssignment compoundAssignment, AnalyzerData data) {
BLangValueExpression varRef = compoundAssignment.varRef;
analyzeExpr(varRef, data);
analyzeExpr(compoundAssignment.expr, data);
}
@Override
public void visit(BLangAssignment assignNode, AnalyzerData data) {
BLangExpression varRef = assignNode.varRef;
analyzeExpr(varRef, data);
analyzeExpr(assignNode.expr, data);
}
@Override
public void visit(BLangRecordDestructure stmt, AnalyzerData data) {
List<BLangExpression> varRefs = getVarRefs(stmt.varRef);
this.checkDuplicateVarRefs(varRefs);
analyzeExpr(stmt.varRef, data);
analyzeExpr(stmt.expr, data);
}
@Override
public void visit(BLangErrorDestructure stmt, AnalyzerData data) {
List<BLangExpression> varRefs = getVarRefs(stmt.varRef);
this.checkDuplicateVarRefs(varRefs);
analyzeExpr(stmt.varRef, data);
analyzeExpr(stmt.expr, data);
}
@Override
public void visit(BLangTupleDestructure stmt, AnalyzerData data) {
List<BLangExpression> varRefs = getVarRefs(stmt.varRef);
this.checkDuplicateVarRefs(varRefs);
analyzeExpr(stmt.varRef, data);
analyzeExpr(stmt.expr, data);
}
private void checkDuplicateVarRefs(List<BLangExpression> varRefs) {
checkDuplicateVarRefs(varRefs, new HashSet<>());
}
private List<BLangExpression> getVarRefs(BLangRecordVarRef varRef) {
List<BLangExpression> varRefs = varRef.recordRefFields.stream()
.map(e -> e.variableReference).collect(Collectors.toList());
if (varRef.restParam != null) {
varRefs.add(varRef.restParam);
}
return varRefs;
}
private List<BLangExpression> getVarRefs(BLangErrorVarRef varRef) {
List<BLangExpression> varRefs = new ArrayList<>();
if (varRef.message != null) {
varRefs.add(varRef.message);
}
if (varRef.cause != null) {
varRefs.add(varRef.cause);
}
varRefs.addAll(varRef.detail.stream().map(e -> e.expr).collect(Collectors.toList()));
if (varRef.restVar != null) {
varRefs.add(varRef.restVar);
}
return varRefs;
}
private List<BLangExpression> getVarRefs(BLangTupleVarRef varRef) {
List<BLangExpression> varRefs = new ArrayList<>(varRef.expressions);
if (varRef.restParam != null) {
varRefs.add(varRef.restParam);
}
return varRefs;
}
@Override
public void visit(BLangBreak breakNode, AnalyzerData data) {
if (data.loopCount == 0) {
this.dlog.error(breakNode.pos, DiagnosticErrorCode.BREAK_CANNOT_BE_OUTSIDE_LOOP);
return;
}
if (checkNextBreakValidityInTransaction(data)) {
this.dlog.error(breakNode.pos, DiagnosticErrorCode.BREAK_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
return;
}
if (data.loopAlterNotAllowed) {
this.dlog.error(breakNode.pos, DiagnosticErrorCode.BREAK_NOT_ALLOWED);
}
}
@Override
public void visit(BLangPanic panicNode, AnalyzerData data) {
analyzeExpr(panicNode.expr, data);
}
@Override
public void visit(BLangXMLNSStatement xmlnsStmtNode, AnalyzerData data) {
}
@Override
public void visit(BLangClientDeclarationStatement clientDeclarationStatement, AnalyzerData data) {
analyzeNode(clientDeclarationStatement.clientDeclaration, data);
}
@Override
public void visit(BLangExpressionStmt exprStmtNode, AnalyzerData data) {
BLangExpression expr = exprStmtNode.expr;
analyzeExpr(expr, data);
}
private boolean isTopLevel(SymbolEnv env) {
return env.enclInvokable.body == env.node;
}
private boolean isInWorker(SymbolEnv env) {
return env.enclInvokable.flagSet.contains(Flag.WORKER);
}
private boolean isCommunicationAllowedLocation(SymbolEnv env) {
return isTopLevel(env);
}
private boolean isDefaultWorkerCommunication(String workerIdentifier) {
return workerIdentifier.equals(DEFAULT_WORKER_NAME);
}
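// The default worker always exists when referenced from within a worker; otherwise the referenced
// type must be a worker-derived future.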
private boolean workerExists(BType type, String workerName, SymbolEnv env) {
if (isDefaultWorkerCommunication(workerName) && isInWorker(env)) {
return true;
}
if (type == symTable.semanticError) {
return false;
}
BType refType = Types.getReferredType(type);
return refType.tag == TypeTags.FUTURE && ((BFutureType) refType).workerDerivative;
}
@Override
public void visit(BLangWorkerSend workerSendNode, AnalyzerData data) {
BSymbol receiver =
symResolver.lookupSymbolInMainSpace(data.env, names.fromIdNode(workerSendNode.workerIdentifier));
if ((receiver.tag & SymTag.VARIABLE) != SymTag.VARIABLE) {
receiver = symTable.notFoundSymbol;
}
verifyPeerCommunication(workerSendNode.pos, receiver, workerSendNode.workerIdentifier.value, data.env);
WorkerActionSystem was = data.workerActionSystemStack.peek();
BType type = workerSendNode.expr.getBType();
if (type == symTable.semanticError) {
was.hasErrors = true;
} else if (workerSendNode.expr instanceof ActionNode) {
this.dlog.error(workerSendNode.expr.pos, DiagnosticErrorCode.INVALID_SEND_EXPR);
} else if (!types.isAssignable(type, symTable.cloneableType)) {
this.dlog.error(workerSendNode.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_SEND, type);
}
String workerName = workerSendNode.workerIdentifier.getValue();
if (data.withinQuery || (!isCommunicationAllowedLocation(data.env) && !data.inInternallyDefinedBlockStmt)) {
this.dlog.error(workerSendNode.pos, DiagnosticErrorCode.UNSUPPORTED_WORKER_SEND_POSITION);
was.hasErrors = true;
}
if (!this.workerExists(workerSendNode.getBType(), workerName, data.env)
|| (!isWorkerFromFunction(data.env, names.fromString(workerName)) && !workerName.equals("function"))) {
this.dlog.error(workerSendNode.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
was.hasErrors = true;
}
workerSendNode.setBType(
createAccumulatedErrorTypeForMatchingReceive(workerSendNode.pos, workerSendNode.expr.getBType(), data));
was.addWorkerAction(workerSendNode);
analyzeExpr(workerSendNode.expr, data);
validateActionParentNode(workerSendNode.pos, workerSendNode.expr);
}
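// Folds the error-only return types seen so far into the send expression type (as a union) so the
// matching receive can also observe those errors; a non-error return before a send is reported.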
private BType createAccumulatedErrorTypeForMatchingReceive(Location pos, BType exprType, AnalyzerData data) {
Set<BType> returnTypesUpToNow = data.returnTypes.peek();
LinkedHashSet<BType> returnTypeAndSendType = new LinkedHashSet<>();
for (BType returnType : returnTypesUpToNow) {
if (onlyContainErrors(returnType)) {
returnTypeAndSendType.add(returnType);
} else {
this.dlog.error(pos, DiagnosticErrorCode.WORKER_SEND_AFTER_RETURN);
}
}
returnTypeAndSendType.add(exprType);
if (returnTypeAndSendType.size() > 1) {
return BUnionType.create(null, returnTypeAndSendType);
} else {
return exprType;
}
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr, AnalyzerData data) {
BSymbol receiver =
symResolver.lookupSymbolInMainSpace(data.env, names.fromIdNode(syncSendExpr.workerIdentifier));
if ((receiver.tag & SymTag.VARIABLE) != SymTag.VARIABLE) {
receiver = symTable.notFoundSymbol;
}
verifyPeerCommunication(syncSendExpr.pos, receiver, syncSendExpr.workerIdentifier.value, data.env);
validateActionParentNode(syncSendExpr.pos, syncSendExpr);
String workerName = syncSendExpr.workerIdentifier.getValue();
WorkerActionSystem was = data.workerActionSystemStack.peek();
if (data.withinQuery || (!isCommunicationAllowedLocation(data.env) && !data.inInternallyDefinedBlockStmt)) {
this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.UNSUPPORTED_WORKER_SEND_POSITION);
was.hasErrors = true;
}
if (!this.workerExists(syncSendExpr.workerType, workerName, data.env)) {
this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, syncSendExpr.workerSymbol);
was.hasErrors = true;
}
syncSendExpr.setBType(
createAccumulatedErrorTypeForMatchingReceive(syncSendExpr.pos, syncSendExpr.expr.getBType(), data));
was.addWorkerAction(syncSendExpr);
analyzeExpr(syncSendExpr.expr, data);
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode, AnalyzerData data) {
validateActionParentNode(workerReceiveNode.pos, workerReceiveNode);
BSymbol sender =
symResolver.lookupSymbolInMainSpace(data.env, names.fromIdNode(workerReceiveNode.workerIdentifier));
if ((sender.tag & SymTag.VARIABLE) != SymTag.VARIABLE) {
sender = symTable.notFoundSymbol;
}
verifyPeerCommunication(workerReceiveNode.pos, sender, workerReceiveNode.workerIdentifier.value, data.env);
WorkerActionSystem was = data.workerActionSystemStack.peek();
String workerName = workerReceiveNode.workerIdentifier.getValue();
if (data.withinQuery || (!isCommunicationAllowedLocation(data.env) && !data.inInternallyDefinedBlockStmt)) {
this.dlog.error(workerReceiveNode.pos, DiagnosticErrorCode.INVALID_WORKER_RECEIVE_POSITION);
was.hasErrors = true;
}
if (!this.workerExists(workerReceiveNode.workerType, workerName, data.env)) {
this.dlog.error(workerReceiveNode.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
was.hasErrors = true;
}
workerReceiveNode.matchingSendsError = createAccumulatedErrorTypeForMatchingSyncSend(workerReceiveNode, data);
was.addWorkerAction(workerReceiveNode);
}
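    /**
     * Verifies that the interaction is between peer workers: a worker inside a fork may only interact with
     * workers of the same fork, and may not interact with the default worker of the enclosing function.
     * Violations are reported as 'worker interactions only allowed between peers'.
     */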
private void verifyPeerCommunication(Location pos, BSymbol otherWorker, String otherWorkerName, SymbolEnv env) {
if (env.enclEnv.node.getKind() != NodeKind.FUNCTION) {
return;
}
BLangFunction funcNode = (BLangFunction) env.enclEnv.node;
Set<Flag> flagSet = funcNode.flagSet;
Name workerDerivedName = names.fromString("0" + otherWorker.name.value);
if (flagSet.contains(Flag.WORKER)) {
if (otherWorkerName.equals(DEFAULT_WORKER_NAME)) {
if (flagSet.contains(Flag.FORKED)) {
dlog.error(pos, DiagnosticErrorCode.WORKER_INTERACTIONS_ONLY_ALLOWED_BETWEEN_PEERS);
}
return;
}
Scope enclFunctionScope = env.enclEnv.enclEnv.scope;
BInvokableSymbol wLambda = (BInvokableSymbol) enclFunctionScope.lookup(workerDerivedName).symbol;
if (wLambda != null && funcNode.anonForkName != null
&& !funcNode.anonForkName.equals(wLambda.enclForkName)) {
dlog.error(pos, DiagnosticErrorCode.WORKER_INTERACTIONS_ONLY_ALLOWED_BETWEEN_PEERS);
}
} else {
BInvokableSymbol wLambda = (BInvokableSymbol) env.scope.lookup(workerDerivedName).symbol;
if (wLambda != null && wLambda.enclForkName != null) {
dlog.error(pos, DiagnosticErrorCode.WORKER_INTERACTIONS_ONLY_ALLOWED_BETWEEN_PEERS);
}
}
}
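    /**
     * Computes the result type of a sync send matching this receive: nil on success, combined with any
     * error-only return types collected so far in the receiving worker. A non-error return type seen before
     * the receive is reported as a 'worker receive after return' error.
     */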
public BType createAccumulatedErrorTypeForMatchingSyncSend(BLangWorkerReceive workerReceiveNode,
AnalyzerData data) {
Set<BType> returnTypesUpToNow = data.returnTypes.peek();
LinkedHashSet<BType> returnTypeAndSendType = new LinkedHashSet<>();
for (BType returnType : returnTypesUpToNow) {
if (onlyContainErrors(returnType)) {
returnTypeAndSendType.add(returnType);
} else {
this.dlog.error(workerReceiveNode.pos, DiagnosticErrorCode.WORKER_RECEIVE_AFTER_RETURN);
}
}
returnTypeAndSendType.add(symTable.nilType);
if (returnTypeAndSendType.size() > 1) {
return BUnionType.create(null, returnTypeAndSendType);
} else {
return symTable.nilType;
}
}
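    /**
     * Returns true if the given return type, after resolving type references and effective intersection types,
     * is an error type or a union all of whose members are effectively error types.
     */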
private boolean onlyContainErrors(BType returnType) {
if (returnType == null) {
return false;
}
returnType = types.getTypeWithEffectiveIntersectionTypes(returnType);
returnType = Types.getReferredType(returnType);
if (returnType.tag == TypeTags.ERROR) {
return true;
}
if (returnType.tag == TypeTags.UNION) {
for (BType memberType : ((BUnionType) returnType).getMemberTypes()) {
BType t = types.getTypeWithEffectiveIntersectionTypes(memberType);
if (t.tag != TypeTags.ERROR) {
return false;
}
}
return true;
}
return false;
}
@Override
public void visit(BLangLiteral literalExpr, AnalyzerData data) {
}
@Override
public void visit(BLangConstRef constRef, AnalyzerData data) {
}
@Override
public void visit(BLangListConstructorExpr listConstructorExpr, AnalyzerData data) {
for (BLangExpression expr : listConstructorExpr.exprs) {
if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
expr = ((BLangListConstructorSpreadOpExpr) expr).expr;
}
analyzeExpr(expr, data);
}
}
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr, AnalyzerData data) {
analyzeExprs(tableConstructorExpr.recordLiteralList, data);
}
@Override
public void visit(BLangRecordLiteral recordLiteral, AnalyzerData data) {
List<RecordLiteralNode.RecordField> fields = recordLiteral.fields;
for (RecordLiteralNode.RecordField field : fields) {
if (field.isKeyValueField()) {
analyzeExpr(((BLangRecordKeyValueField) field).valueExpr, data);
} else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
analyzeExpr((BLangRecordLiteral.BLangRecordVarNameField) field, data);
} else {
analyzeExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, data);
}
}
Set<Object> names = new HashSet<>();
Set<Object> neverTypedKeys = new HashSet<>();
BType literalBType = recordLiteral.getBType();
BType type = Types.getReferredType(literalBType);
boolean isRecord = type.tag == TypeTags.RECORD;
boolean isOpenRecord = isRecord && !((BRecordType) type).sealed;
boolean isInferredRecordForMapCET = isRecord && recordLiteral.expectedType != null &&
recordLiteral.expectedType.tag == TypeTags.MAP;
BLangRecordLiteral.BLangRecordSpreadOperatorField inclusiveTypeSpreadField = null;
for (RecordLiteralNode.RecordField field : fields) {
BLangExpression keyExpr;
if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
(BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
BLangExpression spreadOpExpr = spreadOpField.expr;
analyzeExpr(spreadOpExpr, data);
BType spreadOpExprType = Types.getReferredType(spreadOpExpr.getBType());
int spreadFieldTypeTag = spreadOpExprType.tag;
if (spreadFieldTypeTag == TypeTags.MAP) {
if (inclusiveTypeSpreadField != null) {
this.dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.MULTIPLE_INCLUSIVE_TYPES);
continue;
}
inclusiveTypeSpreadField = spreadOpField;
if (fields.size() > 1) {
                        if (!names.isEmpty()) {
this.dlog.error(spreadOpExpr.pos,
DiagnosticErrorCode.SPREAD_FIELD_MAY_DULPICATE_ALREADY_SPECIFIED_KEYS,
spreadOpExpr);
}
continue;
}
}
if (spreadFieldTypeTag != TypeTags.RECORD) {
continue;
}
BRecordType spreadExprRecordType = (BRecordType) spreadOpExprType;
boolean isSpreadExprRecordTypeSealed = spreadExprRecordType.sealed;
if (!isSpreadExprRecordTypeSealed) {
if (inclusiveTypeSpreadField != null) {
this.dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.MULTIPLE_INCLUSIVE_TYPES);
} else {
inclusiveTypeSpreadField = spreadOpField;
}
}
LinkedHashMap<String, BField> fieldsInRecordType = getUnescapedFieldList(spreadExprRecordType.fields);
for (Object fieldName : names) {
if (!fieldsInRecordType.containsKey(fieldName) && !isSpreadExprRecordTypeSealed) {
this.dlog.error(spreadOpExpr.pos,
DiagnosticErrorCode.SPREAD_FIELD_MAY_DULPICATE_ALREADY_SPECIFIED_KEYS,
spreadOpExpr);
break;
}
}
for (String fieldName : fieldsInRecordType.keySet()) {
BField bField = fieldsInRecordType.get(fieldName);
if (names.contains(fieldName)) {
if (bField.type.tag != TypeTags.NEVER) {
this.dlog.error(spreadOpExpr.pos,
DiagnosticErrorCode.DUPLICATE_KEY_IN_RECORD_LITERAL_SPREAD_OP,
type.getKind().typeName(), fieldName, spreadOpField);
}
continue;
}
if (bField.type.tag == TypeTags.NEVER) {
neverTypedKeys.add(fieldName);
continue;
}
if (!neverTypedKeys.remove(fieldName) &&
inclusiveTypeSpreadField != null && isSpreadExprRecordTypeSealed) {
this.dlog.error(spreadOpExpr.pos,
DiagnosticErrorCode.POSSIBLE_DUPLICATE_OF_FIELD_SPECIFIED_VIA_SPREAD_OP,
Types.getReferredType(recordLiteral.expectedType).getKind().typeName(),
bField.symbol, spreadOpField);
}
names.add(fieldName);
}
} else {
if (field.isKeyValueField()) {
BLangRecordLiteral.BLangRecordKey key = ((BLangRecordKeyValueField) field).key;
keyExpr = key.expr;
if (key.computedKey) {
analyzeExpr(keyExpr, data);
continue;
}
} else {
keyExpr = (BLangRecordLiteral.BLangRecordVarNameField) field;
}
if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
String name = ((BLangSimpleVarRef) keyExpr).variableName.value;
String unescapedName = Utils.unescapeJava(name);
if (names.contains(unescapedName)) {
this.dlog.error(keyExpr.pos, DiagnosticErrorCode.DUPLICATE_KEY_IN_RECORD_LITERAL,
Types.getReferredType(recordLiteral.expectedType).getKind().typeName(),
unescapedName);
} else if (inclusiveTypeSpreadField != null && !neverTypedKeys.contains(unescapedName)) {
this.dlog.error(keyExpr.pos,
DiagnosticErrorCode.POSSIBLE_DUPLICATE_OF_FIELD_SPECIFIED_VIA_SPREAD_OP,
unescapedName, inclusiveTypeSpreadField);
}
if (!isInferredRecordForMapCET && isOpenRecord && !((BRecordType) type).fields.containsKey(name)) {
dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_IDENTIFIER_KEY,
unescapedName);
}
names.add(unescapedName);
} else if (keyExpr.getKind() == NodeKind.LITERAL || keyExpr.getKind() == NodeKind.NUMERIC_LITERAL) {
Object name = ((BLangLiteral) keyExpr).value;
if (names.contains(name)) {
this.dlog.error(keyExpr.pos, DiagnosticErrorCode.DUPLICATE_KEY_IN_RECORD_LITERAL,
Types.getReferredType(recordLiteral.parent.getBType())
.getKind().typeName(), name);
} else if (inclusiveTypeSpreadField != null && !neverTypedKeys.contains(name)) {
this.dlog.error(keyExpr.pos,
DiagnosticErrorCode.POSSIBLE_DUPLICATE_OF_FIELD_SPECIFIED_VIA_SPREAD_OP,
name, inclusiveTypeSpreadField);
}
names.add(name);
}
}
}
if (isInferredRecordForMapCET) {
recordLiteral.expectedType = type;
}
}
@Override
public void visit(BLangRecordLiteral.BLangRecordVarNameField node, AnalyzerData data) {
visit((BLangSimpleVarRef) node, data);
}
private LinkedHashMap<String, BField> getUnescapedFieldList(LinkedHashMap<String, BField> fieldMap) {
LinkedHashMap<String, BField> newMap = new LinkedHashMap<>();
for (String key : fieldMap.keySet()) {
newMap.put(Utils.unescapeJava(key), fieldMap.get(key));
}
return newMap;
}
@Override
public void visit(BLangSimpleVarRef varRefExpr, AnalyzerData data) {
switch (varRefExpr.parent.getKind()) {
case WORKER_RECEIVE:
case WORKER_SEND:
case WORKER_SYNC_SEND:
return;
default:
if (varRefExpr.getBType() != null && varRefExpr.getBType().tag == TypeTags.FUTURE) {
trackNamedWorkerReferences(varRefExpr, data);
}
}
BSymbol symbol = varRefExpr.symbol;
if (symbol != null && Symbols.isFlagOn(symbol.flags, Flags.DEPRECATED)) {
logDeprecatedWaring(varRefExpr.variableName.toString(), symbol, varRefExpr.pos);
}
}
private void trackNamedWorkerReferences(BLangSimpleVarRef varRefExpr, AnalyzerData data) {
if (varRefExpr.symbol == null || (varRefExpr.symbol.flags & Flags.WORKER) != Flags.WORKER) {
return;
}
data.workerReferences.computeIfAbsent(varRefExpr.symbol, s -> new LinkedHashSet<>());
data.workerReferences.get(varRefExpr.symbol).add(varRefExpr);
}
@Override
public void visit(BLangRecordVarRef varRefExpr, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangErrorVarRef varRefExpr, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangTupleVarRef varRefExpr, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr, AnalyzerData data) {
analyzeFieldBasedAccessExpr(fieldAccessExpr, data);
}
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess,
AnalyzerData data) {
analyzeFieldBasedAccessExpr(nsPrefixedFieldBasedAccess, data);
}
private void analyzeFieldBasedAccessExpr(BLangFieldBasedAccess fieldAccessExpr, AnalyzerData data) {
BLangExpression expr = fieldAccessExpr.expr;
analyzeExpr(expr, data);
BSymbol symbol = fieldAccessExpr.symbol;
if (symbol != null && Symbols.isFlagOn(fieldAccessExpr.symbol.flags, Flags.DEPRECATED)) {
String deprecatedConstruct = generateDeprecatedConstructString(expr, fieldAccessExpr.field.toString(),
symbol);
dlog.warning(fieldAccessExpr.pos, DiagnosticWarningCode.USAGE_OF_DEPRECATED_CONSTRUCT, deprecatedConstruct);
}
}
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr, AnalyzerData data) {
analyzeExpr(indexAccessExpr.indexExpr, data);
analyzeExpr(indexAccessExpr.expr, data);
}
@Override
public void visit(BLangInvocation invocationExpr, AnalyzerData data) {
analyzeExpr(invocationExpr.expr, data);
analyzeExprs(invocationExpr.requiredArgs, data);
analyzeExprs(invocationExpr.restArgs, data);
validateInvocationInMatchGuard(invocationExpr);
if ((invocationExpr.symbol != null) && invocationExpr.symbol.kind == SymbolKind.FUNCTION) {
BSymbol funcSymbol = invocationExpr.symbol;
if (Symbols.isFlagOn(funcSymbol.flags, Flags.TRANSACTIONAL) && !data.withinTransactionScope) {
dlog.error(invocationExpr.pos, DiagnosticErrorCode.TRANSACTIONAL_FUNC_INVOKE_PROHIBITED);
return;
}
if (Symbols.isFlagOn(funcSymbol.flags, Flags.DEPRECATED)) {
logDeprecatedWarningForInvocation(invocationExpr);
}
}
}
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr, AnalyzerData data) {
analyzeExprs(errorConstructorExpr.positionalArgs, data);
if (!errorConstructorExpr.namedArgs.isEmpty()) {
analyzeExprs(errorConstructorExpr.namedArgs, data);
}
}
@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocation, AnalyzerData data) {
validateInvocationInMatchGuard(actionInvocation);
if (!actionInvocation.async && !data.withinTransactionScope &&
Symbols.isFlagOn(actionInvocation.symbol.flags, Flags.TRANSACTIONAL)) {
dlog.error(actionInvocation.pos, DiagnosticErrorCode.TRANSACTIONAL_FUNC_INVOKE_PROHIBITED,
actionInvocation.symbol);
return;
}
if (actionInvocation.async && data.withinTransactionScope &&
!Symbols.isFlagOn(actionInvocation.symbol.flags, Flags.TRANSACTIONAL)) {
dlog.error(actionInvocation.pos, DiagnosticErrorCode.USAGE_OF_START_WITHIN_TRANSACTION_IS_PROHIBITED);
return;
}
analyzeExpr(actionInvocation.expr, data);
analyzeExprs(actionInvocation.requiredArgs, data);
analyzeExprs(actionInvocation.restArgs, data);
if (actionInvocation.symbol != null && actionInvocation.symbol.kind == SymbolKind.FUNCTION &&
Symbols.isFlagOn(actionInvocation.symbol.flags, Flags.DEPRECATED)) {
logDeprecatedWarningForInvocation(actionInvocation);
}
if (actionInvocation.flagSet.contains(Flag.TRANSACTIONAL) && !data.withinTransactionScope) {
dlog.error(actionInvocation.pos, DiagnosticErrorCode.TRANSACTIONAL_FUNC_INVOKE_PROHIBITED);
return;
}
if (actionInvocation.async && data.withinLockBlock) {
dlog.error(actionInvocation.pos, actionInvocation.functionPointerInvocation ?
DiagnosticErrorCode.USAGE_OF_WORKER_WITHIN_LOCK_IS_PROHIBITED :
DiagnosticErrorCode.USAGE_OF_START_WITHIN_LOCK_IS_PROHIBITED);
return;
}
if (actionInvocation.symbol != null &&
(actionInvocation.symbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) {
dlog.error(actionInvocation.pos, DiagnosticErrorCode.INVALID_FUNCTIONAL_CONSTRUCTOR_INVOCATION,
actionInvocation.symbol);
return;
}
validateActionInvocation(actionInvocation.pos, actionInvocation);
if (!actionInvocation.async && data.withinTransactionScope) {
actionInvocation.invokedInsideTransaction = true;
}
}
@Override
public void visit(BLangInvocation.BLangResourceAccessInvocation resourceActionInvocation, AnalyzerData data) {
validateInvocationInMatchGuard(resourceActionInvocation);
analyzeExpr(resourceActionInvocation.expr, data);
analyzeExprs(resourceActionInvocation.requiredArgs, data);
analyzeExprs(resourceActionInvocation.restArgs, data);
analyzeExpr(resourceActionInvocation.resourceAccessPathSegments, data);
resourceActionInvocation.invokedInsideTransaction = data.withinTransactionScope;
if (Symbols.isFlagOn(resourceActionInvocation.symbol.flags, Flags.TRANSACTIONAL) &&
!data.withinTransactionScope) {
dlog.error(resourceActionInvocation.pos, DiagnosticErrorCode.TRANSACTIONAL_FUNC_INVOKE_PROHIBITED);
return;
}
if (Symbols.isFlagOn(resourceActionInvocation.symbol.flags, Flags.DEPRECATED)) {
logDeprecatedWarningForInvocation(resourceActionInvocation);
}
validateActionInvocation(resourceActionInvocation.pos, resourceActionInvocation);
}
private void logDeprecatedWarningForInvocation(BLangInvocation invocationExpr) {
String deprecatedConstruct = invocationExpr.name.toString();
BLangExpression expr = invocationExpr.expr;
BSymbol funcSymbol = invocationExpr.symbol;
if (expr != null) {
deprecatedConstruct = generateDeprecatedConstructString(expr, deprecatedConstruct, funcSymbol);
} else if (!Names.DOT.equals(funcSymbol.pkgID.name)) {
deprecatedConstruct = funcSymbol.pkgID + ":" + deprecatedConstruct;
}
dlog.warning(invocationExpr.pos, DiagnosticWarningCode.USAGE_OF_DEPRECATED_CONSTRUCT, deprecatedConstruct);
}
private String generateDeprecatedConstructString(BLangExpression expr, String fieldOrMethodName,
BSymbol symbol) {
BType bType = expr.getBType();
if (bType.tag == TypeTags.TYPEREFDESC) {
return bType + "." + fieldOrMethodName;
}
if (bType.tag == TypeTags.OBJECT) {
BObjectType objectType = (BObjectType) bType;
            if (objectType.classDef == null || !objectType.classDef.internal) {
fieldOrMethodName = bType + "." + fieldOrMethodName;
}
return fieldOrMethodName;
}
if (symbol.kind == SymbolKind.FUNCTION && !Names.DOT.equals(symbol.pkgID.name)) {
fieldOrMethodName = symbol.pkgID + ":" + fieldOrMethodName;
}
return fieldOrMethodName;
}
private void validateActionInvocation(Location pos, BLangInvocation iExpr) {
if (iExpr.expr != null) {
final NodeKind clientNodeKind = iExpr.expr.getKind();
if (clientNodeKind == NodeKind.FIELD_BASED_ACCESS_EXPR) {
final BLangFieldBasedAccess fieldBasedAccess = (BLangFieldBasedAccess) iExpr.expr;
if (fieldBasedAccess.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
dlog.error(pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_AS_EXPR);
} else {
final BLangSimpleVarRef selfName = (BLangSimpleVarRef) fieldBasedAccess.expr;
if (!Names.SELF.equals(selfName.symbol.name)) {
dlog.error(pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_AS_EXPR);
}
}
} else if (clientNodeKind != NodeKind.SIMPLE_VARIABLE_REF &&
clientNodeKind != NodeKind.GROUP_EXPR) {
dlog.error(pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_AS_EXPR);
}
}
validateActionParentNode(pos, iExpr);
}
    /**
     * Validates that actions occur only as part of a statement or nested inside other actions; logs an error
     * and returns false otherwise.
     */
private boolean validateActionParentNode(Location pos, BLangNode node) {
BLangNode parent = node.parent;
while (parent != null) {
final NodeKind kind = parent.getKind();
if (parent instanceof StatementNode || checkActionInQuery(kind)) {
return true;
} else if (parent instanceof ActionNode || parent instanceof BLangVariable || kind == NodeKind.CHECK_EXPR ||
kind == NodeKind.CHECK_PANIC_EXPR || kind == NodeKind.TRAP_EXPR || kind == NodeKind.GROUP_EXPR ||
kind == NodeKind.TYPE_CONVERSION_EXPR) {
if (parent instanceof BLangInvocation.BLangActionInvocation) {
break;
}
parent = parent.parent;
continue;
}
break;
}
dlog.error(pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_AS_EXPR);
return false;
}
private boolean checkActionInQuery(NodeKind parentKind) {
return parentKind == NodeKind.FROM || parentKind == NodeKind.SELECT ||
parentKind == NodeKind.LET_CLAUSE;
}
@Override
public void visit(BLangTypeInit cIExpr, AnalyzerData data) {
analyzeExprs(cIExpr.argsExpr, data);
analyzeExpr(cIExpr.initInvocation, data);
BType type = cIExpr.getBType();
if (cIExpr.userDefinedType != null && Symbols.isFlagOn(type.tsymbol.flags, Flags.DEPRECATED)) {
logDeprecatedWaring(((BLangUserDefinedType) cIExpr.userDefinedType).typeName.toString(), type.tsymbol,
cIExpr.pos);
}
}
@Override
public void visit(BLangTernaryExpr ternaryExpr, AnalyzerData data) {
analyzeExpr(ternaryExpr.expr, data);
analyzeExpr(ternaryExpr.thenExpr, data);
analyzeExpr(ternaryExpr.elseExpr, data);
}
@Override
public void visit(BLangWaitExpr awaitExpr, AnalyzerData data) {
BLangExpression expr = awaitExpr.getExpression();
boolean validWaitFuture = validateWaitFutureExpr(expr);
analyzeExpr(expr, data);
boolean validActionParent = validateActionParentNode(awaitExpr.pos, awaitExpr);
WorkerActionSystem was = data.workerActionSystemStack.peek();
was.addWorkerAction(awaitExpr, data.env);
was.hasErrors = !(validWaitFuture || validActionParent);
}
@Override
public void visit(BLangWaitForAllExpr waitForAllExpr, AnalyzerData data) {
boolean validWaitFuture = true;
for (BLangWaitForAllExpr.BLangWaitKeyValue keyValue : waitForAllExpr.keyValuePairs) {
BLangExpression expr = keyValue.valueExpr != null ? keyValue.valueExpr : keyValue.keyExpr;
validWaitFuture = validWaitFuture && validateWaitFutureExpr(expr);
analyzeExpr(expr, data);
}
boolean validActionParent = validateActionParentNode(waitForAllExpr.pos, waitForAllExpr);
WorkerActionSystem was = data.workerActionSystemStack.peek();
was.addWorkerAction(waitForAllExpr, data.env);
was.hasErrors = !(validWaitFuture || validActionParent);
}
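    /**
     * Validates the expression being waited on: mapping constructors and actions are not allowed directly
     * within a wait expression. Returns false (after logging an error) for such expressions.
     */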
private boolean validateWaitFutureExpr(BLangExpression expr) {
if (expr.getKind() == NodeKind.RECORD_LITERAL_EXPR) {
dlog.error(expr.pos, DiagnosticErrorCode.INVALID_WAIT_MAPPING_CONSTRUCTORS);
return false;
}
if (expr instanceof ActionNode) {
dlog.error(expr.pos, DiagnosticErrorCode.INVALID_WAIT_ACTIONS);
return false;
}
return true;
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess, AnalyzerData data) {
analyzeExpr(xmlElementAccess.expr, data);
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation, AnalyzerData data) {
analyzeExpr(xmlNavigation.expr, data);
if (xmlNavigation.childIndex != null) {
if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS
|| xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
dlog.error(xmlNavigation.pos, DiagnosticErrorCode.UNSUPPORTED_MEMBER_ACCESS_IN_XML_NAVIGATION);
}
analyzeExpr(xmlNavigation.childIndex, data);
}
validateMethodInvocationsInXMLNavigationExpression(xmlNavigation);
}
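    /**
     * Reports 'unsupported method invocation on xml navigation' when a navigation expression is the receiver
     * of a method call or an argument to a langlib call; passing it as an argument to an ordinary function is
     * allowed.
     */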
private void validateMethodInvocationsInXMLNavigationExpression(BLangXMLNavigationAccess expression) {
if (!expression.methodInvocationAnalyzed && expression.parent.getKind() == NodeKind.INVOCATION) {
BLangInvocation invocation = (BLangInvocation) expression.parent;
if (invocation.argExprs.contains(expression)
&& ((invocation.symbol.flags & Flags.LANG_LIB) != Flags.LANG_LIB)) {
return;
}
dlog.error(invocation.pos, DiagnosticErrorCode.UNSUPPORTED_METHOD_INVOCATION_XML_NAV);
}
expression.methodInvocationAnalyzed = true;
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr, AnalyzerData data) {
BLangIdentifier flushWrkIdentifier = workerFlushExpr.workerIdentifier;
Stack<WorkerActionSystem> workerActionSystems = data.workerActionSystemStack;
        WorkerActionSystem currentWorkerAction = workerActionSystems.peek();
        List<BLangWorkerSend> sendStmts = getAsyncSendStmtsOfWorker(currentWorkerAction);
if (flushWrkIdentifier != null) {
List<BLangWorkerSend> sendsToGivenWrkr = sendStmts.stream()
.filter(bLangNode -> bLangNode.workerIdentifier.equals
(flushWrkIdentifier))
.collect(Collectors.toList());
            if (sendsToGivenWrkr.isEmpty()) {
                this.dlog.error(workerFlushExpr.pos, DiagnosticErrorCode.INVALID_WORKER_FLUSH_FOR_WORKER,
                                workerFlushExpr.workerSymbol, currentWorkerAction.currentWorkerId());
return;
} else {
sendStmts = sendsToGivenWrkr;
}
} else {
            if (sendStmts.isEmpty()) {
                this.dlog.error(workerFlushExpr.pos, DiagnosticErrorCode.INVALID_WORKER_FLUSH,
                                currentWorkerAction.currentWorkerId());
return;
}
}
workerFlushExpr.cachedWorkerSendStmts = sendStmts;
validateActionParentNode(workerFlushExpr.pos, workerFlushExpr);
}
private List<BLangWorkerSend> getAsyncSendStmtsOfWorker(WorkerActionSystem currentWorkerAction) {
List<BLangNode> actions = currentWorkerAction.workerActionStateMachines.peek().actions;
return actions.stream()
.filter(CodeAnalyzer::isWorkerSend)
.map(bLangNode -> (BLangWorkerSend) bLangNode)
.collect(Collectors.toList());
}
@Override
public void visit(BLangTrapExpr trapExpr, AnalyzerData data) {
analyzeExpr(trapExpr.expr, data);
}
@Override
public void visit(BLangBinaryExpr binaryExpr, AnalyzerData data) {
if (validateBinaryExpr(binaryExpr)) {
analyzeExpr(binaryExpr.lhsExpr, data);
analyzeExpr(binaryExpr.rhsExpr, data);
}
}
private boolean validateBinaryExpr(BLangBinaryExpr binaryExpr) {
if (binaryExpr.lhsExpr.getBType().tag != TypeTags.FUTURE
&& binaryExpr.rhsExpr.getBType().tag != TypeTags.FUTURE) {
return true;
}
BLangNode parentNode = binaryExpr.parent;
if (binaryExpr.lhsExpr.getBType().tag == TypeTags.FUTURE
|| binaryExpr.rhsExpr.getBType().tag == TypeTags.FUTURE) {
if (parentNode == null) {
return false;
}
if (parentNode.getKind() == NodeKind.WAIT_EXPR) {
return true;
}
}
if (parentNode.getKind() != NodeKind.BINARY_EXPR && binaryExpr.opKind == OperatorKind.BITWISE_OR) {
dlog.error(binaryExpr.pos, DiagnosticErrorCode.OPERATOR_NOT_SUPPORTED, OperatorKind.BITWISE_OR,
symTable.futureType);
return false;
}
if (parentNode.getKind() == NodeKind.BINARY_EXPR) {
return validateBinaryExpr((BLangBinaryExpr) parentNode);
}
return true;
}
@Override
public void visit(BLangElvisExpr elvisExpr, AnalyzerData data) {
analyzeExpr(elvisExpr.lhsExpr, data);
analyzeExpr(elvisExpr.rhsExpr, data);
}
@Override
public void visit(BLangGroupExpr groupExpr, AnalyzerData data) {
analyzeExpr(groupExpr.expression, data);
}
@Override
public void visit(BLangUnaryExpr unaryExpr, AnalyzerData data) {
analyzeExpr(unaryExpr.expr, data);
}
@Override
public void visit(BLangTypedescExpr accessExpr, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangTypeConversionExpr conversionExpr, AnalyzerData data) {
analyzeExpr(conversionExpr.expr, data);
conversionExpr.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, data));
}
@Override
public void visit(BLangXMLQName xmlQName, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangXMLAttribute xmlAttribute, AnalyzerData data) {
analyzeExpr(xmlAttribute.name, data);
analyzeExpr(xmlAttribute.value, data);
}
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral, AnalyzerData data) {
analyzeExpr(xmlElementLiteral.startTagName, data);
analyzeExpr(xmlElementLiteral.endTagName, data);
analyzeExprs(xmlElementLiteral.attributes, data);
analyzeExprs(xmlElementLiteral.children, data);
}
@Override
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral, AnalyzerData data) {
analyzeExprs(xmlSequenceLiteral.xmlItems, data);
}
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral, AnalyzerData data) {
analyzeExprs(xmlTextLiteral.textFragments, data);
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral, AnalyzerData data) {
analyzeExprs(xmlCommentLiteral.textFragments, data);
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral, AnalyzerData data) {
analyzeExprs(xmlProcInsLiteral.dataFragments, data);
analyzeExpr(xmlProcInsLiteral.target, data);
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString, AnalyzerData data) {
analyzeExprs(xmlQuotedString.textFragments, data);
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral, AnalyzerData data) {
analyzeExprs(stringTemplateLiteral.exprs, data);
}
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral, AnalyzerData data) {
analyzeExprs(rawTemplateLiteral.strings, data);
analyzeExprs(rawTemplateLiteral.insertions, data);
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction, AnalyzerData data) {
boolean isWorker = false;
analyzeNode(bLangLambdaFunction.function, data);
if (bLangLambdaFunction.function.flagSet.contains(Flag.TRANSACTIONAL) &&
bLangLambdaFunction.function.flagSet.contains(Flag.WORKER) && !data.withinTransactionScope) {
dlog.error(bLangLambdaFunction.pos, DiagnosticErrorCode.TRANSACTIONAL_WORKER_OUT_OF_TRANSACTIONAL_SCOPE,
bLangLambdaFunction);
return;
}
if (bLangLambdaFunction.parent.getKind() == NodeKind.VARIABLE) {
String workerVarName = ((BLangSimpleVariable) bLangLambdaFunction.parent).name.value;
if (workerVarName.startsWith(WORKER_LAMBDA_VAR_PREFIX)) {
String workerName = workerVarName.substring(1);
isWorker = true;
data.workerActionSystemStack.peek().startWorkerActionStateMachine(workerName,
bLangLambdaFunction.function.pos,
bLangLambdaFunction.function);
}
}
if (isWorker) {
this.visitFunction(bLangLambdaFunction.function, data);
} else {
try {
this.initNewWorkerActionSystem(data);
data.workerActionSystemStack.peek().startWorkerActionStateMachine(DEFAULT_WORKER_NAME,
bLangLambdaFunction.pos,
bLangLambdaFunction.function);
this.visitFunction(bLangLambdaFunction.function, data);
data.workerActionSystemStack.peek().endWorkerActionStateMachine();
} finally {
this.finalizeCurrentWorkerActionSystem(data);
}
}
if (isWorker) {
data.workerActionSystemStack.peek().endWorkerActionStateMachine();
}
}
@Override
public void visit(BLangArrowFunction bLangArrowFunction, AnalyzerData data) {
DefaultValueState prevDefaultValueState = data.defaultValueState;
if (prevDefaultValueState == DefaultValueState.RECORD_FIELD_DEFAULT ||
prevDefaultValueState == DefaultValueState.OBJECT_FIELD_INITIALIZER) {
data.defaultValueState = DefaultValueState.FUNCTION_IN_DEFAULT_VALUE;
}
analyzeExpr(bLangArrowFunction.body.expr, data);
data.defaultValueState = prevDefaultValueState;
}
/* Type Nodes */
@Override
public void visit(BLangRecordTypeNode recordTypeNode, AnalyzerData data) {
data.env = SymbolEnv.createTypeEnv(recordTypeNode, recordTypeNode.symbol.scope, data.env);
for (BLangSimpleVariable field : recordTypeNode.fields) {
DefaultValueState prevDefaultValueState = data.defaultValueState;
data.defaultValueState = DefaultValueState.RECORD_FIELD_DEFAULT;
analyzeNode(field, data);
data.defaultValueState = prevDefaultValueState;
}
}
@Override
public void visit(BLangObjectTypeNode objectTypeNode, AnalyzerData data) {
data.env = SymbolEnv.createTypeEnv(objectTypeNode, objectTypeNode.symbol.scope, data.env);
for (BLangSimpleVariable field : objectTypeNode.fields) {
analyzeNode(field, data);
}
List<BLangFunction> bLangFunctionList = new ArrayList<>(objectTypeNode.functions);
if (objectTypeNode.initFunction != null) {
bLangFunctionList.add(objectTypeNode.initFunction);
}
bLangFunctionList.sort(Comparator.comparingInt(function -> function.pos.lineRange().startLine().line()));
for (BLangFunction function : bLangFunctionList) {
analyzeNode(function, data);
}
}
@Override
public void visit(BLangValueType valueType, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangArrayType arrayType, AnalyzerData data) {
if (containsInferredArraySizesOfHigherDimensions(arrayType.sizes)) {
dlog.error(arrayType.pos, DiagnosticErrorCode.INFER_SIZE_ONLY_SUPPORTED_IN_FIRST_DIMENSION);
} else if (isSizeInferredArray(arrayType.sizes) && !isValidInferredArray(arrayType.parent)) {
dlog.error(arrayType.pos, DiagnosticErrorCode.CANNOT_INFER_SIZE_ARRAY_SIZE_FROM_THE_CONTEXT);
}
analyzeTypeNode(arrayType.elemtype, data);
}
private boolean isSizeInferredArray(List<BLangExpression> indexSizes) {
return !indexSizes.isEmpty() && isInferredArrayIndicator(indexSizes.get(indexSizes.size() - 1));
}
private boolean isInferredArrayIndicator(BLangExpression size) {
return size.getKind() == LITERAL && ((BLangLiteral) size).value.equals(Constants.INFERRED_ARRAY_INDICATOR);
}
private boolean containsInferredArraySizesOfHigherDimensions(List<BLangExpression> sizes) {
if (sizes.size() < 2) {
return false;
}
for (int i = 0; i < sizes.size() - 1; i++) {
if (isInferredArrayIndicator(sizes.get(i))) {
return true;
}
}
return false;
}
@Override
public void visit(BLangBuiltInRefTypeNode builtInRefType, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangConstrainedType constrainedType, AnalyzerData data) {
analyzeTypeNode(constrainedType.constraint, data);
}
@Override
public void visit(BLangStreamType streamType, AnalyzerData data) {
analyzeTypeNode(streamType.constraint, data);
analyzeTypeNode(streamType.error, data);
}
@Override
public void visit(BLangTableTypeNode tableType, AnalyzerData data) {
analyzeTypeNode(tableType.constraint, data);
if (tableType.tableKeyTypeConstraint != null) {
analyzeTypeNode(tableType.tableKeyTypeConstraint.keyType, data);
}
}
@Override
public void visit(BLangErrorType errorType, AnalyzerData data) {
BLangType detailType = errorType.detailType;
if (detailType != null && detailType.getKind() == NodeKind.CONSTRAINED_TYPE) {
BLangType constraint = ((BLangConstrainedType) detailType).constraint;
if (constraint.getKind() == NodeKind.USER_DEFINED_TYPE) {
BLangUserDefinedType userDefinedType = (BLangUserDefinedType) constraint;
if (userDefinedType.typeName.value.equals(TypeDefBuilderHelper.INTERSECTED_ERROR_DETAIL)) {
return;
}
}
}
analyzeTypeNode(errorType.detailType, data);
}
@Override
public void visit(BLangUserDefinedType userDefinedType, AnalyzerData data) {
BTypeSymbol typeSymbol = userDefinedType.getBType().tsymbol;
if (typeSymbol != null && Symbols.isFlagOn(typeSymbol.flags, Flags.DEPRECATED)) {
logDeprecatedWaring(userDefinedType.typeName.toString(), typeSymbol, userDefinedType.pos);
}
}
@Override
public void visit(BLangTupleTypeNode tupleTypeNode, AnalyzerData data) {
tupleTypeNode.memberTypeNodes.forEach(memberType -> analyzeTypeNode(memberType, data));
analyzeTypeNode(tupleTypeNode.restParamType, data);
}
@Override
public void visit(BLangUnionTypeNode unionTypeNode, AnalyzerData data) {
unionTypeNode.memberTypeNodes.forEach(memberType -> analyzeTypeNode(memberType, data));
}
@Override
public void visit(BLangIntersectionTypeNode intersectionTypeNode, AnalyzerData data) {
for (BLangType constituentTypeNode : intersectionTypeNode.constituentTypeNodes) {
analyzeTypeNode(constituentTypeNode, data);
}
}
@Override
public void visit(BLangFunctionTypeNode functionTypeNode, AnalyzerData data) {
if (functionTypeNode.flagSet.contains(Flag.ANY_FUNCTION)) {
return;
}
functionTypeNode.params.forEach(node -> analyzeNode(node, data));
analyzeTypeNode(functionTypeNode.returnTypeNode, data);
}
@Override
public void visit(BLangFiniteTypeNode finiteTypeNode, AnalyzerData data) {
/* Ignore */
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression, AnalyzerData data) {
analyzeExpr(bLangVarArgsExpression.expr, data);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression, AnalyzerData data) {
analyzeExpr(bLangNamedArgsExpression.expr, data);
}
@Override
public void visit(BLangCheckedExpr checkedExpr, AnalyzerData data) {
data.failVisited = true;
analyzeExpr(checkedExpr.expr, data);
if (data.env.scope.owner.getKind() == SymbolKind.PACKAGE) {
return;
}
BLangInvokableNode enclInvokable = data.env.enclInvokable;
List<BType> equivalentErrorTypeList = checkedExpr.equivalentErrorTypeList;
if (equivalentErrorTypeList != null && !equivalentErrorTypeList.isEmpty()) {
if (data.defaultValueState == DefaultValueState.RECORD_FIELD_DEFAULT) {
dlog.error(checkedExpr.pos,
DiagnosticErrorCode.INVALID_USAGE_OF_CHECK_IN_RECORD_FIELD_DEFAULT_EXPRESSION);
return;
}
if (data.defaultValueState == DefaultValueState.OBJECT_FIELD_INITIALIZER) {
BAttachedFunction initializerFunc =
((BObjectTypeSymbol) getEnclosingClass(data.env).getBType().tsymbol).initializerFunc;
if (initializerFunc == null) {
dlog.error(checkedExpr.pos,
DiagnosticErrorCode
.INVALID_USAGE_OF_CHECK_IN_OBJECT_FIELD_INITIALIZER_IN_OBJECT_WITH_NO_INIT_METHOD);
return;
}
BType exprErrorTypes = getErrorTypes(checkedExpr.expr.getBType());
BType initMethodReturnType = initializerFunc.type.retType;
if (!types.isAssignable(exprErrorTypes, initMethodReturnType)) {
dlog.error(checkedExpr.pos, DiagnosticErrorCode
.INVALID_USAGE_OF_CHECK_IN_OBJECT_FIELD_INITIALIZER_WITH_INIT_METHOD_RETURN_TYPE_MISMATCH,
initMethodReturnType, exprErrorTypes);
}
return;
}
}
if (enclInvokable == null) {
return;
}
BType exprType = enclInvokable.getReturnTypeNode().getBType();
BType checkedExprType = checkedExpr.expr.getBType();
BType errorType = getErrorTypes(checkedExprType);
if (errorType == symTable.semanticError) {
return;
}
if (!data.failureHandled && !types.isAssignable(errorType, exprType) &&
!types.isNeverTypeOrStructureTypeWithARequiredNeverMember(checkedExprType)) {
dlog.error(checkedExpr.pos,
DiagnosticErrorCode.CHECKED_EXPR_NO_MATCHING_ERROR_RETURN_IN_ENCL_INVOKABLE);
}
if (!data.errorTypes.empty()) {
data.errorTypes.peek().add(getErrorTypes(checkedExpr.expr.getBType()));
}
BType errorTypes;
if (exprType.tag == TypeTags.UNION) {
errorTypes = types.getErrorType((BUnionType) exprType);
} else {
errorTypes = exprType;
}
data.returnTypes.peek().add(errorTypes);
}
@Override
public void visit(BLangCheckPanickedExpr checkPanicExpr, AnalyzerData data) {
analyzeExpr(checkPanicExpr.expr, data);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr, AnalyzerData data) {
}
@Override
public void visit(BLangQueryExpr queryExpr, AnalyzerData data) {
boolean prevQueryToTableWithKey = data.queryToTableWithKey;
data.queryToTableWithKey = queryExpr.isTable() && !queryExpr.fieldNameIdentifierList.isEmpty();
data.queryToMap = queryExpr.isMap;
boolean prevWithinQuery = data.withinQuery;
data.withinQuery = true;
int fromCount = 0;
for (BLangNode clause : queryExpr.getQueryClauses()) {
if (clause.getKind() == NodeKind.FROM) {
fromCount++;
BLangFromClause fromClause = (BLangFromClause) clause;
BLangExpression collection = (BLangExpression) fromClause.getCollection();
if (fromCount > 1) {
if (TypeTags.STREAM == Types.getReferredType(collection.getBType()).tag) {
this.dlog.error(collection.pos, DiagnosticErrorCode.NOT_ALLOWED_STREAM_USAGE_WITH_FROM);
}
}
}
analyzeNode(clause, data);
}
data.withinQuery = prevWithinQuery;
data.queryToTableWithKey = prevQueryToTableWithKey;
}
@Override
public void visit(BLangQueryAction queryAction, AnalyzerData data) {
boolean prevFailureHandled = data.failureHandled;
data.failureHandled = true;
boolean prevWithinQuery = data.withinQuery;
data.withinQuery = true;
int fromCount = 0;
for (BLangNode clause : queryAction.getQueryClauses()) {
if (clause.getKind() == NodeKind.FROM) {
fromCount++;
BLangFromClause fromClause = (BLangFromClause) clause;
BLangExpression collection = (BLangExpression) fromClause.getCollection();
if (fromCount > 1) {
if (TypeTags.STREAM == Types.getReferredType(collection.getBType()).tag) {
this.dlog.error(collection.pos, DiagnosticErrorCode.NOT_ALLOWED_STREAM_USAGE_WITH_FROM);
}
}
}
analyzeNode(clause, data);
}
validateActionParentNode(queryAction.pos, queryAction);
data.failureHandled = prevFailureHandled;
data.withinQuery = prevWithinQuery;
}
@Override
public void visit(BLangFromClause fromClause, AnalyzerData data) {
analyzeExpr(fromClause.collection, data);
}
@Override
public void visit(BLangJoinClause joinClause, AnalyzerData data) {
analyzeExpr(joinClause.collection, data);
if (joinClause.onClause != null) {
analyzeNode(joinClause.onClause, data);
}
}
@Override
public void visit(BLangLetClause letClause, AnalyzerData data) {
for (BLangLetVariable letVariable : letClause.letVarDeclarations) {
analyzeNode((BLangNode) letVariable.definitionNode.getVariable(), data);
}
}
@Override
public void visit(BLangWhereClause whereClause, AnalyzerData data) {
analyzeExpr(whereClause.expression, data);
}
@Override
public void visit(BLangOnClause onClause, AnalyzerData data) {
analyzeExpr(onClause.lhsExpr, data);
analyzeExpr(onClause.rhsExpr, data);
}
@Override
public void visit(BLangOrderByClause orderByClause, AnalyzerData data) {
orderByClause.orderByKeyList.forEach(value -> analyzeExpr((BLangExpression) value.getOrderKey(), data));
}
@Override
public void visit(BLangSelectClause selectClause, AnalyzerData data) {
analyzeExpr(selectClause.expression, data);
}
@Override
public void visit(BLangOnConflictClause onConflictClause, AnalyzerData data) {
analyzeExpr(onConflictClause.expression, data);
if (!(data.queryToTableWithKey || data.queryToMap)) {
dlog.error(onConflictClause.pos,
DiagnosticErrorCode.ON_CONFLICT_ONLY_WORKS_WITH_MAPS_OR_TABLES_WITH_KEY_SPECIFIER);
}
}
@Override
public void visit(BLangDoClause doClause, AnalyzerData data) {
analyzeNode(doClause.body, data);
}
@Override
public void visit(BLangOnFailClause onFailClause, AnalyzerData data) {
boolean currentFailVisited = data.failVisited;
data.failVisited = false;
VariableDefinitionNode onFailVarDefNode = onFailClause.variableDefinitionNode;
if (onFailVarDefNode != null) {
BLangVariable onFailVarNode = (BLangVariable) onFailVarDefNode.getVariable();
for (BType errorType : data.errorTypes.peek()) {
if (!types.isAssignable(errorType, onFailVarNode.getBType())) {
dlog.error(onFailVarNode.pos, DiagnosticErrorCode.INCOMPATIBLE_ON_FAIL_ERROR_DEFINITION, errorType,
onFailVarNode.getBType());
}
}
}
analyzeNode(onFailClause.body, data);
onFailClause.bodyContainsFail = data.failVisited;
data.failVisited = currentFailVisited;
}
@Override
public void visit(BLangLimitClause limitClause, AnalyzerData data) {
analyzeExpr(limitClause.expression, data);
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr, AnalyzerData data) {
BLangExpression expr = typeTestExpr.expr;
analyzeNode(expr, data);
BType exprType = expr.getBType();
BType typeNodeType = typeTestExpr.typeNode.getBType();
if (typeNodeType == symTable.semanticError || exprType == symTable.semanticError) {
return;
}
if (types.isAssignable(exprType, typeNodeType)) {
if (typeTestExpr.isNegation) {
dlog.hint(typeTestExpr.pos, DiagnosticHintCode.EXPRESSION_ALWAYS_FALSE);
return;
}
if (types.isNeverTypeOrStructureTypeWithARequiredNeverMember(exprType)) {
dlog.hint(typeTestExpr.pos, DiagnosticHintCode.UNNECESSARY_CONDITION_FOR_VARIABLE_OF_TYPE_NEVER);
return;
}
dlog.hint(typeTestExpr.pos, DiagnosticHintCode.UNNECESSARY_CONDITION);
return;
}
if (!intersectionExists(expr, typeNodeType, data, typeTestExpr.pos)) {
dlog.error(typeTestExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_CHECK, exprType, typeNodeType);
}
}
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr, AnalyzerData data) {
analyzeExpr(annotAccessExpr.expr, data);
BAnnotationSymbol annotationSymbol = annotAccessExpr.annotationSymbol;
if (annotationSymbol != null && Symbols.isFlagOn(annotationSymbol.flags, Flags.DEPRECATED)) {
logDeprecatedWaring(annotAccessExpr.annotationName.toString(), annotationSymbol, annotAccessExpr.pos);
}
}
@Override
public void visit(BLangRegExpTemplateLiteral regExpTemplateLiteral, AnalyzerData data) {
List<BLangExpression> interpolationsList =
symResolver.getListOfInterpolations(regExpTemplateLiteral.reDisjunction.sequenceList);
interpolationsList.forEach(interpolation -> analyzeExpr(interpolation, data));
}
private void logDeprecatedWaring(String deprecatedConstruct, BSymbol symbol, Location pos) {
if (!Names.DOT.equals(symbol.pkgID.name)) {
deprecatedConstruct = symbol.pkgID + ":" + deprecatedConstruct;
}
dlog.warning(pos, DiagnosticWarningCode.USAGE_OF_DEPRECATED_CONSTRUCT, deprecatedConstruct);
}
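    /**
     * Returns true if the expression type and the tested type can intersect (or the expression is of type 'any'
     * and the test type is 'readonly'), i.e., the type test is not statically impossible.
     */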
private boolean intersectionExists(BLangExpression expression, BType testType, AnalyzerData data,
Location intersectionPos) {
BType expressionType = expression.getBType();
BType intersectionType = types.getTypeIntersection(
Types.IntersectionContext.typeTestIntersectionExistenceContext(intersectionPos),
expressionType, testType, data.env);
return (intersectionType != symTable.semanticError) ||
(expressionType.tag == TypeTags.ANY && testType.tag == TypeTags.READONLY);
}
@Override
public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr, AnalyzerData data) {
/* Ignore */
}
private <E extends BLangExpression> void analyzeExpr(E node, AnalyzerData data) {
if (node == null) {
return;
}
SymbolEnv prevEnv = data.env;
BLangNode parent = data.parent;
node.parent = data.parent;
data.parent = node;
node.accept(this, data);
data.parent = parent;
checkAccess(node, data);
checkExpressionValidity(node, data);
data.env = prevEnv;
}
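    /**
     * Expressions of type never (or of a structure type with a required never-typed member) may only be used
     * where their value is discarded: as an expression statement, as the initializer of a future-typed
     * variable, or inside trap. Any other usage is reported as an error.
     */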
private <E extends BLangExpression> void checkExpressionValidity(E exprNode, AnalyzerData data) {
if (exprNode.getKind() == NodeKind.GROUP_EXPR ||
!types.isNeverTypeOrStructureTypeWithARequiredNeverMember(exprNode.getBType())) {
return;
}
if (!checkExpressionInValidParent(exprNode.parent, data)) {
dlog.error(exprNode.pos, DiagnosticErrorCode.EXPRESSION_OF_NEVER_TYPE_NOT_ALLOWED);
}
}
private boolean checkExpressionInValidParent(BLangNode currentParent, AnalyzerData data) {
if (currentParent == null) {
return false;
}
if (currentParent.getKind() == NodeKind.GROUP_EXPR) {
return checkExpressionInValidParent(currentParent.parent, data);
}
return currentParent.getKind() == NodeKind.EXPRESSION_STATEMENT ||
(currentParent.getKind() == NodeKind.VARIABLE &&
                        ((BLangSimpleVariable) currentParent).typeNode.getBType().tag == TypeTags.FUTURE)
|| currentParent.getKind() == NodeKind.TRAP_EXPR;
}
@Override
public void visit(BLangConstant constant, AnalyzerData data) {
analyzeTypeNode(constant.typeNode, data);
analyzeNode(constant.expr, data);
analyzeExportableTypeRef(constant.symbol, constant.symbol.type.tsymbol, false, constant.pos);
constant.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, data));
}
    /**
     * This method checks for private symbols being accessed or used outside of their package, and/or private
     * symbols being used in public fields of objects/records, and reports an error for those occurrences.
     *
     * @param node expression node to analyze
     * @param data data used to analyze the node
     */
private <E extends BLangExpression> void checkAccess(E node, AnalyzerData data) {
if (node.getBType() != null) {
checkAccessSymbol(node.getBType().tsymbol, data.env.enclPkg.symbol.pkgID, node.pos);
}
if (node.getKind() == NodeKind.INVOCATION) {
BLangInvocation bLangInvocation = (BLangInvocation) node;
checkAccessSymbol(bLangInvocation.symbol, data.env.enclPkg.symbol.pkgID, bLangInvocation.pos);
}
}
private void checkAccessSymbol(BSymbol symbol, PackageID pkgID, Location position) {
if (symbol == null) {
return;
}
if (!pkgID.equals(symbol.pkgID) && !Symbols.isPublic(symbol)) {
dlog.error(position, DiagnosticErrorCode.ATTEMPT_REFER_NON_ACCESSIBLE_SYMBOL, symbol.name);
}
}
private <E extends BLangExpression> void analyzeExprs(List<E> nodeList, AnalyzerData data) {
for (int i = 0; i < nodeList.size(); i++) {
analyzeExpr(nodeList.get(i), data);
}
}
private void initNewWorkerActionSystem(AnalyzerData data) {
data.workerActionSystemStack.push(new WorkerActionSystem());
}
private void finalizeCurrentWorkerActionSystem(AnalyzerData data) {
WorkerActionSystem was = data.workerActionSystemStack.pop();
if (!was.hasErrors) {
this.validateWorkerInteractions(was, data);
}
}
private static boolean isWorkerSend(BLangNode action) {
return action.getKind() == NodeKind.WORKER_SEND;
}
private static boolean isWorkerSyncSend(BLangNode action) {
return action.getKind() == NodeKind.WORKER_SYNC_SEND;
}
private static boolean isWaitAction(BLangNode action) {
return action.getKind() == NodeKind.WAIT_EXPR;
}
private String extractWorkerId(BLangNode action) {
if (isWorkerSend(action)) {
return ((BLangWorkerSend) action).workerIdentifier.value;
} else if (isWorkerSyncSend(action)) {
return ((BLangWorkerSyncSendExpr) action).workerIdentifier.value;
} else {
return ((BLangWorkerReceive) action).workerIdentifier.value;
}
}
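    /**
     * Runs the worker action state machines against each other: matching send/receive pairs are advanced
     * together, wait actions advance once the awaited workers are done, and type checks are applied to each
     * matched interaction. If the system stops making progress while some worker still has pending actions,
     * an invalid worker interaction (deadlock) is reported.
     */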
private void validateWorkerInteractions(WorkerActionSystem workerActionSystem, AnalyzerData data) {
if (!validateWorkerInteractionsAfterWaitAction(workerActionSystem)) {
return;
}
BLangNode currentAction;
boolean systemRunning;
data.workerSystemMovementSequence = 0;
int systemIterationCount = 0;
int prevWorkerSystemMovementSequence = data.workerSystemMovementSequence;
do {
systemRunning = false;
systemIterationCount++;
for (WorkerActionStateMachine worker : workerActionSystem.finshedWorkers) {
if (worker.done()) {
continue;
}
currentAction = worker.currentAction();
if (isWaitAction(currentAction)) {
handleWaitAction(workerActionSystem, currentAction, worker, data);
systemRunning = true;
continue;
}
if (!isWorkerSend(currentAction) && !isWorkerSyncSend(currentAction)) {
continue;
}
WorkerActionStateMachine otherSM = workerActionSystem.find(this.extractWorkerId(currentAction));
if (otherSM.done()) {
continue;
}
if (isWaitAction(otherSM.currentAction())) {
systemRunning = false;
continue;
}
if (!otherSM.currentIsReceive(worker.workerId)) {
continue;
}
BLangWorkerReceive receive = (BLangWorkerReceive) otherSM.currentAction();
if (isWorkerSyncSend(currentAction)) {
this.validateWorkerActionParameters((BLangWorkerSyncSendExpr) currentAction, receive);
} else {
this.validateWorkerActionParameters((BLangWorkerSend) currentAction, receive);
}
otherSM.next();
data.workerSystemMovementSequence++;
worker.next();
data.workerSystemMovementSequence++;
systemRunning = true;
String channelName = generateChannelName(worker.workerId, otherSM.workerId);
otherSM.node.sendsToThis.add(channelName);
worker.node.sendsToThis.add(channelName);
}
if (systemIterationCount > workerActionSystem.finshedWorkers.size()) {
systemIterationCount = 0;
if (prevWorkerSystemMovementSequence == data.workerSystemMovementSequence) {
systemRunning = false;
}
prevWorkerSystemMovementSequence = data.workerSystemMovementSequence;
}
} while (systemRunning);
if (!workerActionSystem.everyoneDone()) {
this.reportInvalidWorkerInteractionDiagnostics(workerActionSystem);
}
}
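    /**
     * Reports an error for any send, sync send, or receive that targets a worker the current worker has
     * already waited on (worker interaction after wait action).
     */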
private boolean validateWorkerInteractionsAfterWaitAction(WorkerActionSystem workerActionSystem) {
boolean isValid = true;
for (WorkerActionStateMachine worker : workerActionSystem.finshedWorkers) {
Set<String> waitingOnWorkerSet = new HashSet<>();
for (BLangNode action : worker.actions) {
if (isWaitAction(action)) {
if (action instanceof BLangWaitForAllExpr) {
BLangWaitForAllExpr waitForAllExpr = (BLangWaitForAllExpr) action;
for (BLangWaitForAllExpr.BLangWaitKeyValue keyValuePair : waitForAllExpr.keyValuePairs) {
BSymbol workerSymbol = getWorkerSymbol(keyValuePair);
if (workerSymbol != null) {
waitingOnWorkerSet.add(workerSymbol.name.value);
}
}
} else {
BLangWaitExpr wait = (BLangWaitExpr) action;
for (String workerName : getWorkerNameList(wait.exprList.get(0),
workerActionSystem.getActionEnvironment(wait))) {
waitingOnWorkerSet.add(workerName);
}
}
} else if (isWorkerSend(action)) {
BLangWorkerSend send = (BLangWorkerSend) action;
if (waitingOnWorkerSet.contains(send.workerIdentifier.value)) {
dlog.error(action.pos, DiagnosticErrorCode.WORKER_INTERACTION_AFTER_WAIT_ACTION, action);
isValid = false;
}
} else if (isWorkerSyncSend(action)) {
BLangWorkerSyncSendExpr syncSend = (BLangWorkerSyncSendExpr) action;
if (waitingOnWorkerSet.contains(syncSend.workerIdentifier.value)) {
dlog.error(action.pos, DiagnosticErrorCode.WORKER_INTERACTION_AFTER_WAIT_ACTION, action);
isValid = false;
}
} else if (action.getKind() == NodeKind.WORKER_RECEIVE) {
BLangWorkerReceive receive = (BLangWorkerReceive) action;
if (waitingOnWorkerSet.contains(receive.workerIdentifier.value)) {
dlog.error(action.pos, DiagnosticErrorCode.WORKER_INTERACTION_AFTER_WAIT_ACTION, action);
isValid = false;
}
}
}
}
return isValid;
}
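    /**
     * Advances the waiting worker's state machine: for a multiple-wait action once all awaited workers are
     * done, and for a wait on one or more alternative futures once any awaited worker is done (or when no
     * named workers are awaited).
     */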
private void handleWaitAction(WorkerActionSystem workerActionSystem, BLangNode currentAction,
WorkerActionStateMachine worker, AnalyzerData data) {
if (currentAction instanceof BLangWaitForAllExpr) {
boolean allWorkersAreDone = true;
BLangWaitForAllExpr waitForAllExpr = (BLangWaitForAllExpr) currentAction;
for (BLangWaitForAllExpr.BLangWaitKeyValue keyValuePair : waitForAllExpr.keyValuePairs) {
BSymbol workerSymbol = getWorkerSymbol(keyValuePair);
if (isWorkerSymbol(workerSymbol)) {
Name workerName = workerSymbol.name;
if (isWorkerFromFunction(workerActionSystem.getActionEnvironment(currentAction), workerName)) {
WorkerActionStateMachine otherSM = workerActionSystem.find(workerName.value);
allWorkersAreDone = allWorkersAreDone && otherSM.done();
}
}
}
if (allWorkersAreDone) {
worker.next();
data.workerSystemMovementSequence++;
}
} else {
BLangWaitExpr wait = (BLangWaitExpr) currentAction;
List<String> workerNameList = getWorkerNameList(wait.exprList.get(0),
workerActionSystem.getActionEnvironment(currentAction));
if (workerNameList.isEmpty()) {
worker.next();
data.workerSystemMovementSequence++;
}
for (String workerName : workerNameList) {
var otherSM = workerActionSystem.find(workerName);
if (otherSM.done()) {
worker.next();
data.workerSystemMovementSequence++;
break;
}
}
}
}
private BSymbol getWorkerSymbol(BLangWaitForAllExpr.BLangWaitKeyValue keyValuePair) {
BLangExpression value = keyValuePair.getValue();
if (value != null && value.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
return ((BLangSimpleVarRef) value).symbol;
} else if (keyValuePair.keyExpr != null && keyValuePair.keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
return ((BLangSimpleVarRef) keyValuePair.keyExpr).symbol;
}
return null;
}
private List<String> getWorkerNameList(BLangExpression expr, SymbolEnv functionEnv) {
ArrayList<String> workerNames = new ArrayList<>();
populateWorkerNameList(expr, workerNames, functionEnv);
return workerNames;
}
private void populateWorkerNameList(BLangExpression expr, ArrayList<String> workerNames, SymbolEnv functionEnv) {
if (expr.getKind() == NodeKind.BINARY_EXPR) {
BLangBinaryExpr binaryExpr = (BLangBinaryExpr) expr;
populateWorkerNameList(binaryExpr.lhsExpr, workerNames, functionEnv);
populateWorkerNameList(binaryExpr.rhsExpr, workerNames, functionEnv);
} else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangSimpleVarRef varRef = (BLangSimpleVarRef) expr;
if (isWorkerSymbol(varRef.symbol) && isWorkerFromFunction(functionEnv, varRef.symbol.name)) {
workerNames.add(varRef.variableName.value);
}
}
}
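    /**
     * Returns true if a worker with the given name is visible from the given environment, searching enclosing
     * scopes but not crossing the boundary of an enclosing lambda that is not itself a worker.
     */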
private boolean isWorkerFromFunction(SymbolEnv functionEnv, Name workerName) {
if (functionEnv == null) {
return false;
}
if (functionEnv.scope.lookup(workerName).symbol != null) {
return true;
}
if (functionEnv.enclInvokable != null) {
Set<Flag> flagSet = functionEnv.enclInvokable.flagSet;
if (flagSet.contains(Flag.LAMBDA) && !flagSet.contains(Flag.WORKER)) {
return false;
}
}
return isWorkerFromFunction(functionEnv.enclEnv, workerName);
}
private boolean isWorkerSymbol(BSymbol symbol) {
return symbol != null && (symbol.flags & Flags.WORKER) == Flags.WORKER;
}
private void reportInvalidWorkerInteractionDiagnostics(WorkerActionSystem workerActionSystem) {
this.dlog.error(workerActionSystem.getRootPosition(), DiagnosticErrorCode.INVALID_WORKER_INTERACTION,
workerActionSystem.toString());
}
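    /**
     * Type-checks a matched async send against its receive, adds an implicit cast where needed, re-checks any
     * enclosing trap/check/fail parent of the receive, and links the send expression to the receive.
     */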
private void validateWorkerActionParameters(BLangWorkerSend send, BLangWorkerReceive receive) {
types.checkType(receive, send.getBType(), receive.getBType());
addImplicitCast(send.getBType(), receive);
NodeKind kind = receive.parent.getKind();
if (kind == NodeKind.TRAP_EXPR || kind == NodeKind.CHECK_EXPR || kind == NodeKind.CHECK_PANIC_EXPR ||
kind == NodeKind.FAIL) {
typeChecker.checkExpr((BLangExpression) receive.parent, receive.env);
}
receive.sendExpression = send.expr;
}
private void validateWorkerActionParameters(BLangWorkerSyncSendExpr send, BLangWorkerReceive receive) {
send.receive = receive;
NodeKind parentNodeKind = send.parent.getKind();
if (parentNodeKind == NodeKind.VARIABLE) {
BLangSimpleVariable variable = (BLangSimpleVariable) send.parent;
if (variable.isDeclaredWithVar) {
variable.setBType(variable.symbol.type = send.expectedType = receive.matchingSendsError);
}
} else if (parentNodeKind == NodeKind.ASSIGNMENT) {
BLangAssignment assignment = (BLangAssignment) send.parent;
if (assignment.varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BSymbol varSymbol = ((BLangSimpleVarRef) assignment.varRef).symbol;
if (varSymbol != null) {
send.expectedType = varSymbol.type;
}
}
}
if (receive.matchingSendsError != symTable.nilType && parentNodeKind == NodeKind.EXPRESSION_STATEMENT) {
dlog.error(send.pos, DiagnosticErrorCode.ASSIGNMENT_REQUIRED, send.workerSymbol);
} else {
types.checkType(send.pos, receive.matchingSendsError, send.expectedType,
DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
types.checkType(receive, send.getBType(), receive.getBType());
addImplicitCast(send.getBType(), receive);
NodeKind kind = receive.parent.getKind();
if (kind == NodeKind.TRAP_EXPR || kind == NodeKind.CHECK_EXPR || kind == NodeKind.CHECK_PANIC_EXPR) {
typeChecker.checkExpr((BLangExpression) receive.parent, receive.env);
}
receive.sendExpression = send;
}
private void addImplicitCast(BType actualType, BLangWorkerReceive receive) {
if (receive.getBType() != null && receive.getBType() != symTable.semanticError) {
types.setImplicitCastExpr(receive, actualType, receive.getBType());
receive.setBType(actualType);
}
}
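// Returns true when a break/continue would jump out of an enclosing transaction block, which
// the callers report as an error.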
private boolean checkNextBreakValidityInTransaction(AnalyzerData data) {
return !data.loopWithinTransactionCheckStack.peek() && data.transactionCount > 0 && data.withinTransactionScope;
}
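// Returns true when a return statement would be used to exit an enclosing transaction block.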
private boolean checkReturnValidityInTransaction(AnalyzerData data) {
return !data.returnWithinTransactionCheckStack.peek() && data.transactionCount > 0
&& data.withinTransactionScope;
}
private void validateModuleInitFunction(BLangFunction funcNode) {
if (funcNode.attachedFunction || !Names.USER_DEFINED_INIT_SUFFIX.value.equals(funcNode.name.value)) {
return;
}
if (Symbols.isPublic(funcNode.symbol)) {
this.dlog.error(funcNode.pos, DiagnosticErrorCode.MODULE_INIT_CANNOT_BE_PUBLIC);
}
if (!funcNode.requiredParams.isEmpty() || funcNode.restParam != null) {
this.dlog.error(funcNode.pos, DiagnosticErrorCode.MODULE_INIT_CANNOT_HAVE_PARAMS);
}
types.validateErrorOrNilReturn(funcNode, DiagnosticErrorCode.MODULE_INIT_RETURN_SHOULD_BE_ERROR_OR_NIL);
}
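// Extracts the error component of the given type: the type itself for error types, the generic
// error type for readonly, and a union of the error members for union types; otherwise the
// semantic-error placeholder is returned.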
private BType getErrorTypes(BType bType) {
if (bType == null) {
return symTable.semanticError;
}
BType errorType = symTable.semanticError;
int tag = bType.tag;
if (tag == TypeTags.TYPEREFDESC) {
return getErrorTypes(Types.getReferredType(bType));
}
if (tag == TypeTags.ERROR) {
errorType = bType;
} else if (tag == TypeTags.READONLY) {
errorType = symTable.errorType;
} else if (tag == TypeTags.UNION) {
LinkedHashSet<BType> errTypes = new LinkedHashSet<>();
Set<BType> memTypes = ((BUnionType) bType).getMemberTypes();
for (BType memType : memTypes) {
BType memErrType = getErrorTypes(memType);
if (memErrType != symTable.semanticError) {
errTypes.add(memErrType);
}
}
if (!errTypes.isEmpty()) {
errorType = errTypes.size() == 1 ? errTypes.iterator().next() : BUnionType.create(null, errTypes);
}
}
return errorType;
}
/**
* This class contains the state machines for a set of workers.
*/
private static class WorkerActionSystem {
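// One WorkerActionStateMachine is kept per worker (including the implicit default worker), and
// the analyzer advances them together so that every send can be paired with a matching receive.
// As a hypothetical example, a send `x -> w2;` in worker w1 is only valid if worker w2 performs
// a corresponding `<- w1` receive.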
public List<WorkerActionStateMachine> finshedWorkers = new ArrayList<>();
private Stack<WorkerActionStateMachine> workerActionStateMachines = new Stack<>();
private Map<BLangNode, SymbolEnv> workerInteractionEnvironments = new IdentityHashMap<>();
private boolean hasErrors = false;
public void startWorkerActionStateMachine(String workerId, Location pos, BLangFunction node) {
workerActionStateMachines.push(new WorkerActionStateMachine(pos, workerId, node));
}
public void endWorkerActionStateMachine() {
finshedWorkers.add(workerActionStateMachines.pop());
}
public void addWorkerAction(BLangNode action) {
this.workerActionStateMachines.peek().actions.add(action);
}
public WorkerActionStateMachine find(String workerId) {
for (WorkerActionStateMachine worker : this.finshedWorkers) {
if (worker.workerId.equals(workerId)) {
return worker;
}
}
throw new AssertionError("Reference to non-existent worker " + workerId);
}
public boolean everyoneDone() {
return this.finshedWorkers.stream().allMatch(WorkerActionStateMachine::done);
}
public Location getRootPosition() {
return this.finshedWorkers.iterator().next().pos;
}
@Override
public String toString() {
return this.finshedWorkers.toString();
}
public String currentWorkerId() {
return workerActionStateMachines.peek().workerId;
}
public void addWorkerAction(BLangNode action, SymbolEnv env) {
addWorkerAction(action);
this.workerInteractionEnvironments.put(action, env);
}
private SymbolEnv getActionEnvironment(BLangNode currentAction) {
return workerInteractionEnvironments.get(currentAction);
}
}
/**
* This class represents a state machine that tracks the progress of the send/receive
* actions of a worker.
*/
private static class WorkerActionStateMachine {
private static final String WORKER_SM_FINISHED = "FINISHED";
public int currentState;
public List<BLangNode> actions = new ArrayList<>();
public Location pos;
public String workerId;
public BLangFunction node;
public WorkerActionStateMachine(Location pos, String workerId, BLangFunction node) {
this.pos = pos;
this.workerId = workerId;
this.node = node;
}
public boolean done() {
return this.actions.size() == this.currentState;
}
public BLangNode currentAction() {
return this.actions.get(this.currentState);
}
public boolean currentIsReceive(String sourceWorkerId) {
if (this.done()) {
return false;
}
BLangNode action = this.currentAction();
return !isWorkerSend(action) && !isWorkerSyncSend(action) && !isWaitAction(action)
&& ((BLangWorkerReceive) action).workerIdentifier.value.equals(sourceWorkerId);
}
public void next() {
this.currentState++;
}
@Override
public String toString() {
if (this.done()) {
return WORKER_SM_FINISHED;
} else {
BLangNode action = this.currentAction();
if (isWorkerSend(action)) {
return ((BLangWorkerSend) action).toActionString();
} else if (isWorkerSyncSend(action)) {
return ((BLangWorkerSyncSendExpr) action).toActionString();
} else if (isWaitAction(action)) {
return action.toString();
} else {
return ((BLangWorkerReceive) action).toActionString();
}
}
}
}
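// Builds the channel name used to pair a send in `source` with the matching receive in
// `target`, e.g. generateChannelName("w1", "w2") yields "w1->w2".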
public static String generateChannelName(String source, String target) {
return source + "->" + target;
}
private BLangNode getEnclosingClass(SymbolEnv env) {
BLangNode node = env.node;
while (node.getKind() != NodeKind.CLASS_DEFN) {
env = env.enclEnv;
node = env.node;
}
return node;
}
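// In a match guard over a value that is not known to be immutable, only isolated calls with
// immutable arguments are allowed; this reports calls to non-isolated functions/methods and
// calls (including stream constructors) that pass mutable arguments.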
private void validateInvocationInMatchGuard(BLangInvocation invocation) {
BLangExpression matchedExpr = getMatchedExprIfCalledInMatchGuard(invocation);
if (matchedExpr == null) {
return;
}
BType matchedExprType = matchedExpr.getBType();
if (types.isInherentlyImmutableType(matchedExprType) ||
Symbols.isFlagOn(matchedExprType.flags, Flags.READONLY)) {
return;
}
BSymbol invocationSymbol = invocation.symbol;
if (invocationSymbol == null) {
BLangNode parent = invocation.parent;
if (parent == null || parent.getKind() != NodeKind.TYPE_INIT_EXPR) {
return;
}
BLangTypeInit newExpr = (BLangTypeInit) parent;
if (newExpr.getBType().tag != TypeTags.STREAM) {
return;
}
List<BLangExpression> argsExpr = newExpr.argsExpr;
if (argsExpr.isEmpty()) {
return;
}
BLangExpression streamImplementorExpr = argsExpr.get(0);
BType type = streamImplementorExpr.getBType();
if (!types.isInherentlyImmutableType(type) && !Symbols.isFlagOn(type.flags, Flags.READONLY)) {
dlog.error(streamImplementorExpr.pos,
DiagnosticErrorCode.INVALID_CALL_WITH_MUTABLE_ARGS_IN_MATCH_GUARD);
}
return;
}
long flags = invocationSymbol.flags;
boolean methodCall = Symbols.isFlagOn(flags, Flags.ATTACHED);
boolean callsNonIsolatedFunction = !Symbols.isFlagOn(flags, Flags.ISOLATED) ||
(methodCall && !Symbols.isFlagOn(invocationSymbol.owner.flags, Flags.ISOLATED));
if (callsNonIsolatedFunction) {
dlog.error(invocation.pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_CALL_IN_MATCH_GUARD);
}
List<BLangExpression> args = new ArrayList<>(invocation.requiredArgs);
args.addAll(invocation.restArgs);
for (BLangExpression arg : args) {
BType type = arg.getBType();
if (type != symTable.semanticError &&
!types.isInherentlyImmutableType(type) &&
!Symbols.isFlagOn(type.flags, Flags.READONLY)) {
dlog.error(arg.pos, DiagnosticErrorCode.INVALID_CALL_WITH_MUTABLE_ARGS_IN_MATCH_GUARD);
}
}
}
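// Walks up from the invocation; if it occurs (directly or through a method-call chain) inside a
// match guard, returns the matched expression of the enclosing match statement, otherwise null.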
private BLangExpression getMatchedExprIfCalledInMatchGuard(BLangInvocation invocation) {
BLangNode prevParent = invocation;
BLangNode parent = invocation.parent;
boolean encounteredMatchGuard = false;
while (parent != null) {
NodeKind parentKind = parent.getKind();
switch (parentKind) {
case LAMBDA:
case FUNCTION:
case RESOURCE_FUNC:
return null;
case MATCH_CLAUSE:
if (encounteredMatchGuard) {
return ((BLangMatchStatement) parent.parent).expr;
}
return null;
case MATCH_GUARD:
encounteredMatchGuard = true;
break;
case INVOCATION:
BLangInvocation parentInvocation = (BLangInvocation) parent;
if (parentInvocation.langLibInvocation || prevParent != parentInvocation.expr) {
return null;
}
}
prevParent = parent;
parent = parent.parent;
}
return null;
}
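// Tracks whether analysis is currently inside a record field default value, an object field
// initializer, or a function defined within such a default value.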
private enum DefaultValueState {
NOT_IN_DEFAULT_VALUE,
RECORD_FIELD_DEFAULT,
OBJECT_FIELD_INITIALIZER,
FUNCTION_IN_DEFAULT_VALUE
}
/**
* Holds the mutable analysis state that is threaded through the code analyzer's visit methods.
*
* @since 2.0.0
*/
public static class AnalyzerData {
SymbolEnv env;
BLangNode parent;
int loopCount;
boolean loopAlterNotAllowed;
boolean inInternallyDefinedBlockStmt;
int workerSystemMovementSequence;
Stack<WorkerActionSystem> workerActionSystemStack = new Stack<>();
Map<BSymbol, Set<BLangNode>> workerReferences = new HashMap<>();
int transactionCount;
boolean withinTransactionScope;
int commitCount;
int rollbackCount;
boolean commitRollbackAllowed;
int commitCountWithinBlock;
int rollbackCountWithinBlock;
Stack<Boolean> loopWithinTransactionCheckStack = new Stack<>();
Stack<Boolean> returnWithinTransactionCheckStack = new Stack<>();
Stack<Boolean> transactionalFuncCheckStack = new Stack<>();
boolean withinLockBlock;
boolean failureHandled;
boolean failVisited;
boolean queryToTableWithKey;
boolean withinQuery;
boolean queryToMap;
Stack<LinkedHashSet<BType>> returnTypes = new Stack<>();
Stack<LinkedHashSet<BType>> errorTypes = new Stack<>();
DefaultValueState defaultValueState = DefaultValueState.NOT_IN_DEFAULT_VALUE;
}
} | class CodeAnalyzer extends SimpleBLangNodeAnalyzer<CodeAnalyzer.AnalyzerData> {
private static final CompilerContext.Key<CodeAnalyzer> CODE_ANALYZER_KEY =
new CompilerContext.Key<>();
private final SymbolResolver symResolver;
private final SymbolTable symTable;
private final Types types;
private final BLangDiagnosticLog dlog;
private final TypeChecker typeChecker;
private final Names names;
private final ReachabilityAnalyzer reachabilityAnalyzer;
public static CodeAnalyzer getInstance(CompilerContext context) {
CodeAnalyzer codeGenerator = context.get(CODE_ANALYZER_KEY);
if (codeGenerator == null) {
codeGenerator = new CodeAnalyzer(context);
}
return codeGenerator;
}
public CodeAnalyzer(CompilerContext context) {
context.put(CODE_ANALYZER_KEY, this);
this.symTable = SymbolTable.getInstance(context);
this.types = Types.getInstance(context);
this.dlog = BLangDiagnosticLog.getInstance(context);
this.typeChecker = TypeChecker.getInstance(context);
this.names = Names.getInstance(context);
this.symResolver = SymbolResolver.getInstance(context);
this.reachabilityAnalyzer = ReachabilityAnalyzer.getInstance(context);
}
public BLangPackage analyze(BLangPackage pkgNode) {
final AnalyzerData data = new AnalyzerData();
visitNode(pkgNode, data);
return pkgNode;
}
@Override
public void visit(BLangPackage pkgNode, AnalyzerData data) {
this.dlog.setCurrentPackageId(pkgNode.packageID);
if (pkgNode.completedPhases.contains(CompilerPhase.CODE_ANALYZE)) {
return;
}
data.parent = pkgNode;
data.env = this.symTable.pkgEnvMap.get(pkgNode.symbol);
analyzeTopLevelNodes(pkgNode, data);
pkgNode.getTestablePkgs().forEach(testablePackage -> visitNode(testablePackage, data));
}
@Override
public void visit(BLangTestablePackage node, AnalyzerData data) {
visit((BLangPackage) node, data);
}
private void analyzeTopLevelNodes(BLangPackage pkgNode, AnalyzerData data) {
List<TopLevelNode> topLevelNodes = pkgNode.topLevelNodes;
for (int i = 0; i < topLevelNodes.size(); i++) {
analyzeNode((BLangNode) topLevelNodes.get(i), data);
}
pkgNode.completedPhases.add(CompilerPhase.CODE_ANALYZE);
}
@Override
public void analyzeNode(BLangNode node, AnalyzerData data) {
SymbolEnv prevEnv = data.env;
BLangNode parent = data.parent;
node.parent = parent;
data.parent = node;
visitNode(node, data);
data.parent = parent;
data.env = prevEnv;
}
private void analyzeTypeNode(BLangType node, AnalyzerData data) {
if (node == null) {
return;
}
analyzeNode(node, data);
}
@Override
public void visit(BLangCompilationUnit compUnitNode, AnalyzerData data) {
compUnitNode.topLevelNodes.forEach(e -> analyzeNode((BLangNode) e, data));
}
@Override
public void visit(BLangTypeDefinition typeDefinition, AnalyzerData data) {
analyzeTypeNode(typeDefinition.typeNode, data);
typeDefinition.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, data));
}
@Override
public void visit(BLangClassDefinition classDefinition, AnalyzerData data) {
data.env = SymbolEnv.createClassEnv(classDefinition, classDefinition.symbol.scope, data.env);
for (BLangSimpleVariable field : classDefinition.fields) {
DefaultValueState prevDefaultValueState = data.defaultValueState;
data.defaultValueState = DefaultValueState.OBJECT_FIELD_INITIALIZER;
analyzeNode(field, data);
data.defaultValueState = prevDefaultValueState;
}
List<BLangFunction> bLangFunctionList = new ArrayList<>(classDefinition.functions);
if (classDefinition.initFunction != null) {
bLangFunctionList.add(classDefinition.initFunction);
}
bLangFunctionList.sort(Comparator.comparingInt(function -> function.pos.lineRange().startLine().line()));
for (BLangFunction function : bLangFunctionList) {
analyzeNode(function, data);
}
classDefinition.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, data));
}
@Override
public void visit(BLangObjectConstructorExpression objectConstructorExpression, AnalyzerData data) {
visit(objectConstructorExpression.typeInit, data);
}
@Override
public void visit(BLangTupleVariableDef bLangTupleVariableDef, AnalyzerData data) {
analyzeNode(bLangTupleVariableDef.var, data);
}
@Override
public void visit(BLangRecordVariableDef bLangRecordVariableDef, AnalyzerData data) {
analyzeNode(bLangRecordVariableDef.var, data);
}
@Override
public void visit(BLangErrorVariableDef bLangErrorVariableDef, AnalyzerData data) {
analyzeNode(bLangErrorVariableDef.errorVariable, data);
}
@Override
public void visit(BLangResourceFunction funcNode, AnalyzerData data) {
visit((BLangFunction) funcNode, data);
}
@Override
public void visit(BLangFunction funcNode, AnalyzerData data) {
validateParams(funcNode, data);
analyzeNode(funcNode.returnTypeNode, data);
boolean isLambda = funcNode.flagSet.contains(Flag.LAMBDA);
if (isLambda) {
return;
}
if (Symbols.isPublic(funcNode.symbol)) {
funcNode.symbol.params.forEach(symbol -> analyzeExportableTypeRef(funcNode.symbol, symbol.type.tsymbol,
true,
funcNode.pos));
if (funcNode.symbol.restParam != null) {
analyzeExportableTypeRef(funcNode.symbol, funcNode.symbol.restParam.type.tsymbol, true,
funcNode.restParam.pos);
}
analyzeExportableTypeRef(funcNode.symbol, funcNode.symbol.retType.tsymbol, true,
funcNode.returnTypeNode.pos);
}
if (MAIN_FUNCTION_NAME.equals(funcNode.name.value)) {
new MainFunctionValidator(types, dlog).validateMainFunction(funcNode);
}
this.validateModuleInitFunction(funcNode);
try {
this.initNewWorkerActionSystem(data);
data.workerActionSystemStack.peek().startWorkerActionStateMachine(DEFAULT_WORKER_NAME,
funcNode.pos,
funcNode);
this.visitFunction(funcNode, data);
data.workerActionSystemStack.peek().endWorkerActionStateMachine();
} finally {
this.finalizeCurrentWorkerActionSystem(data);
}
funcNode.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, data));
validateNamedWorkerUniqueReferences(data);
}
private void validateNamedWorkerUniqueReferences(AnalyzerData data) {
for (var nodes : data.workerReferences.values()) {
if (nodes.size() > 1) {
for (BLangNode node : nodes) {
dlog.error(node.pos, DiagnosticErrorCode.ILLEGAL_WORKER_REFERENCE_AS_A_VARIABLE_REFERENCE, node);
}
}
}
data.workerReferences.clear();
}
private void validateParams(BLangFunction funcNode, AnalyzerData data) {
for (BLangSimpleVariable parameter : funcNode.requiredParams) {
analyzeNode(parameter, data);
}
if (funcNode.restParam != null) {
analyzeNode(funcNode.restParam, data);
}
}
private void visitFunction(BLangFunction funcNode, AnalyzerData data) {
data.env = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, data.env);
data.returnWithinTransactionCheckStack.push(true);
data.returnTypes.push(new LinkedHashSet<>());
data.transactionalFuncCheckStack.push(funcNode.flagSet.contains(Flag.TRANSACTIONAL));
if (Symbols.isNative(funcNode.symbol)) {
return;
}
if (isPublicInvokableNode(funcNode)) {
analyzeNode(funcNode.returnTypeNode, data);
}
/* the body can be null in the case of Object type function declarations */
if (funcNode.body != null) {
DefaultValueState prevDefaultValueState = data.defaultValueState;
if (prevDefaultValueState == DefaultValueState.RECORD_FIELD_DEFAULT ||
prevDefaultValueState == DefaultValueState.OBJECT_FIELD_INITIALIZER) {
data.defaultValueState = DefaultValueState.FUNCTION_IN_DEFAULT_VALUE;
}
analyzeNode(funcNode.body, data);
data.defaultValueState = prevDefaultValueState;
}
reachabilityAnalyzer.analyzeReachability(funcNode, data.env);
data.returnTypes.pop();
data.returnWithinTransactionCheckStack.pop();
data.transactionalFuncCheckStack.pop();
}
private boolean isPublicInvokableNode(BLangInvokableNode invNode) {
return Symbols.isPublic(invNode.symbol) && (SymbolKind.PACKAGE.equals(invNode.symbol.owner.getKind()) ||
Symbols.isPublic(invNode.symbol.owner));
}
@Override
public void visit(BLangBlockFunctionBody body, AnalyzerData data) {
boolean prevWithinTxScope = data.withinTransactionScope;
boolean prevLoopAlterNotAllowed = data.loopAlterNotAllowed;
data.loopAlterNotAllowed = data.loopCount > 0;
if (!prevWithinTxScope) {
data.withinTransactionScope = data.transactionalFuncCheckStack.peek();
}
data.env = SymbolEnv.createFuncBodyEnv(body, data.env);
for (BLangStatement e : body.stmts) {
data.inInternallyDefinedBlockStmt = true;
analyzeNode(e, data);
}
data.inInternallyDefinedBlockStmt = false;
if (data.transactionalFuncCheckStack.peek()) {
data.withinTransactionScope = prevWithinTxScope;
}
data.loopAlterNotAllowed = prevLoopAlterNotAllowed;
}
@Override
public void visit(BLangExprFunctionBody body, AnalyzerData data) {
analyzeExpr(body.expr, data);
}
@Override
public void visit(BLangExternalFunctionBody body, AnalyzerData data) {
}
@Override
public void visit(BLangForkJoin forkJoin, AnalyzerData data) {
if (forkJoin.workers.isEmpty()) {
dlog.error(forkJoin.pos, DiagnosticErrorCode.INVALID_FOR_JOIN_SYNTAX_EMPTY_FORK);
}
}
@Override
public void visit(BLangTransaction transactionNode, AnalyzerData data) {
if (data.transactionalFuncCheckStack.peek()) {
this.dlog.error(transactionNode.pos,
DiagnosticErrorCode.TRANSACTION_CANNOT_BE_USED_WITHIN_TRANSACTIONAL_SCOPE);
return;
}
data.errorTypes.push(new LinkedHashSet<>());
boolean previousWithinTxScope = data.withinTransactionScope;
int previousCommitCount = data.commitCount;
int previousRollbackCount = data.rollbackCount;
boolean prevCommitRollbackAllowed = data.commitRollbackAllowed;
data.commitRollbackAllowed = true;
data.commitCount = 0;
data.rollbackCount = 0;
data.withinTransactionScope = true;
data.loopWithinTransactionCheckStack.push(false);
data.returnWithinTransactionCheckStack.push(false);
data.transactionCount++;
boolean failureHandled = data.failureHandled;
if (!failureHandled) {
data.failureHandled = transactionNode.onFailClause != null;
}
analyzeNode(transactionNode.transactionBody, data);
data.failureHandled = failureHandled;
if (data.commitCount < 1) {
this.dlog.error(transactionNode.pos, DiagnosticErrorCode.INVALID_COMMIT_COUNT);
}
data.transactionCount--;
data.withinTransactionScope = previousWithinTxScope;
data.commitCount = previousCommitCount;
data.rollbackCount = previousRollbackCount;
data.commitRollbackAllowed = prevCommitRollbackAllowed;
data.returnWithinTransactionCheckStack.pop();
data.loopWithinTransactionCheckStack.pop();
analyzeOnFailClause(transactionNode.onFailClause, data);
data.errorTypes.pop();
}
private void analyzeOnFailClause(BLangOnFailClause onFailClause, AnalyzerData data) {
if (onFailClause != null) {
analyzeNode(onFailClause, data);
}
}
@Override
public void visit(BLangTransactionalExpr transactionalExpr, AnalyzerData data) {
}
@Override
public void visit(BLangCommitExpr commitExpr, AnalyzerData data) {
data.commitCount++;
data.commitCountWithinBlock++;
if (data.transactionCount == 0) {
this.dlog.error(commitExpr.pos, DiagnosticErrorCode.COMMIT_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK);
return;
}
if (data.transactionalFuncCheckStack.peek()) {
this.dlog.error(commitExpr.pos, DiagnosticErrorCode.COMMIT_CANNOT_BE_WITHIN_TRANSACTIONAL_FUNCTION);
return;
}
if (!data.withinTransactionScope || !data.commitRollbackAllowed ||
data.loopWithinTransactionCheckStack.peek()) {
this.dlog.error(commitExpr.pos, DiagnosticErrorCode.COMMIT_NOT_ALLOWED);
return;
}
data.withinTransactionScope = false;
}
@Override
public void visit(BLangRollback rollbackNode, AnalyzerData data) {
data.rollbackCount++;
data.rollbackCountWithinBlock++;
if (data.transactionCount == 0 && !data.withinTransactionScope) {
this.dlog.error(rollbackNode.pos, DiagnosticErrorCode.ROLLBACK_CANNOT_BE_OUTSIDE_TRANSACTION_BLOCK);
return;
}
if (!data.transactionalFuncCheckStack.empty() && data.transactionalFuncCheckStack.peek()) {
this.dlog.error(rollbackNode.pos, DiagnosticErrorCode.ROLLBACK_CANNOT_BE_WITHIN_TRANSACTIONAL_FUNCTION);
return;
}
if (!data.withinTransactionScope || !data.commitRollbackAllowed ||
(!data.loopWithinTransactionCheckStack.empty() && data.loopWithinTransactionCheckStack.peek())) {
this.dlog.error(rollbackNode.pos, DiagnosticErrorCode.ROLLBACK_NOT_ALLOWED);
return;
}
data.withinTransactionScope = false;
analyzeExpr(rollbackNode.expr, data);
}
@Override
public void visit(BLangRetry retryNode, AnalyzerData data) {
data.errorTypes.push(new LinkedHashSet<>());
boolean failureHandled = data.failureHandled;
if (!failureHandled) {
data.failureHandled = retryNode.onFailClause != null;
}
visitNode(retryNode.retrySpec, data);
visitNode(retryNode.retryBody, data);
data.failureHandled = failureHandled;
retryNode.retryBody.failureBreakMode = retryNode.onFailClause != null ?
BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK : BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
analyzeOnFailClause(retryNode.onFailClause, data);
data.errorTypes.pop();
}
@Override
public void visit(BLangRetrySpec retrySpec, AnalyzerData data) {
if (retrySpec.retryManagerType != null) {
BSymbol retryManagerTypeSymbol = symTable.langErrorModuleSymbol.scope
.lookup(names.fromString("RetryManager")).symbol;
BType abstractRetryManagerType = retryManagerTypeSymbol.type;
if (!types.isAssignable(retrySpec.retryManagerType.getBType(), abstractRetryManagerType)) {
dlog.error(retrySpec.pos, DiagnosticErrorCode.INVALID_INTERFACE_ON_NON_ABSTRACT_OBJECT,
RETRY_MANAGER_OBJECT_SHOULD_RETRY_FUNC, retrySpec.retryManagerType.getBType());
}
}
}
@Override
public void visit(BLangRetryTransaction retryTransaction, AnalyzerData data) {
analyzeNode(retryTransaction.retrySpec, data);
analyzeNode(retryTransaction.transaction, data);
}
@Override
public void visit(BLangBlockStmt blockNode, AnalyzerData data) {
int prevCommitCount = data.commitCountWithinBlock;
int prevRollbackCount = data.rollbackCountWithinBlock;
data.commitCountWithinBlock = 0;
data.rollbackCountWithinBlock = 0;
boolean inInternallyDefinedBlockStmt = data.inInternallyDefinedBlockStmt;
data.inInternallyDefinedBlockStmt = checkBlockIsAnInternalBlockInImmediateFunctionBody(blockNode);
data.env = SymbolEnv.createBlockEnv(blockNode, data.env);
blockNode.stmts.forEach(e -> analyzeNode(e, data));
data.inInternallyDefinedBlockStmt = inInternallyDefinedBlockStmt;
if (data.commitCountWithinBlock > 1 || data.rollbackCountWithinBlock > 1) {
this.dlog.error(blockNode.pos, DiagnosticErrorCode.MAX_ONE_COMMIT_ROLLBACK_ALLOWED_WITHIN_A_BRANCH);
}
data.commitCountWithinBlock = prevCommitCount;
data.rollbackCountWithinBlock = prevRollbackCount;
}
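// Returns true if the given block is nested, through block statements only, directly inside a
// function body block.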
private boolean checkBlockIsAnInternalBlockInImmediateFunctionBody(BLangNode node) {
BLangNode parent = node.parent;
while (parent != null) {
final NodeKind kind = parent.getKind();
if (kind == NodeKind.BLOCK_FUNCTION_BODY) {
return true;
}
if (kind == NodeKind.BLOCK) {
parent = parent.parent;
} else {
return false;
}
}
return false;
}
@Override
public void visit(BLangReturn returnStmt, AnalyzerData data) {
if (checkReturnValidityInTransaction(data)) {
this.dlog.error(returnStmt.pos, DiagnosticErrorCode.RETURN_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
return;
}
analyzeExpr(returnStmt.expr, data);
data.returnTypes.peek().add(returnStmt.expr.getBType());
}
@Override
public void visit(BLangIf ifStmt, AnalyzerData data) {
boolean independentBlocks = false;
int prevCommitCount = data.commitCount;
int prevRollbackCount = data.rollbackCount;
BLangStatement elseStmt = ifStmt.elseStmt;
if (data.withinTransactionScope && elseStmt != null && elseStmt.getKind() != NodeKind.IF) {
independentBlocks = true;
data.commitRollbackAllowed = true;
}
boolean prevTxMode = data.withinTransactionScope;
if ((ifStmt.expr.getKind() == NodeKind.GROUP_EXPR ?
((BLangGroupExpr) ifStmt.expr).expression.getKind() :
ifStmt.expr.getKind()) == NodeKind.TRANSACTIONAL_EXPRESSION) {
data.withinTransactionScope = true;
}
BLangBlockStmt body = ifStmt.body;
analyzeNode(body, data);
if (ifStmt.expr.getKind() == NodeKind.TRANSACTIONAL_EXPRESSION) {
data.withinTransactionScope = prevTxMode;
}
if (elseStmt != null) {
if (independentBlocks) {
data.commitRollbackAllowed = true;
data.withinTransactionScope = true;
}
analyzeNode(elseStmt, data);
if ((prevCommitCount != data.commitCount) || prevRollbackCount != data.rollbackCount) {
data.commitRollbackAllowed = false;
}
}
analyzeExpr(ifStmt.expr, data);
}
@Override
public void visit(BLangMatchStatement matchStatement, AnalyzerData data) {
data.errorTypes.push(new LinkedHashSet<>());
analyzeExpr(matchStatement.expr, data);
boolean failureHandled = data.failureHandled;
if (!failureHandled) {
data.failureHandled = matchStatement.onFailClause != null;
}
List<BLangMatchClause> matchClauses = matchStatement.matchClauses;
int clausesSize = matchClauses.size();
for (int i = 0; i < clausesSize; i++) {
BLangMatchClause firstClause = matchClauses.get(i);
for (int j = i + 1; j < clausesSize; j++) {
BLangMatchClause secondClause = matchClauses.get(j);
if (!checkSimilarMatchGuard(firstClause.matchGuard, secondClause.matchGuard)) {
if (firstClause.matchGuard == null) {
checkSimilarMatchPatternsBetweenClauses(firstClause, secondClause);
}
continue;
}
checkSimilarMatchPatternsBetweenClauses(firstClause, secondClause);
}
analyzeNode(firstClause, data);
}
data.failureHandled = failureHandled;
analyzeOnFailClause(matchStatement.onFailClause, data);
data.errorTypes.pop();
}
@Override
public void visit(BLangMatchClause matchClause, AnalyzerData data) {
Map<String, BVarSymbol> variablesInMatchPattern = new HashMap<>();
boolean patternListContainsSameVars = true;
List<BLangMatchPattern> matchPatterns = matchClause.matchPatterns;
BLangMatchGuard matchGuard = matchClause.matchGuard;
for (int i = 0; i < matchPatterns.size(); i++) {
BLangMatchPattern matchPattern = matchPatterns.get(i);
if (matchPattern.getBType() == symTable.noType) {
dlog.warning(matchPattern.pos, DiagnosticWarningCode.MATCH_STMT_UNMATCHED_PATTERN);
}
if (patternListContainsSameVars) {
patternListContainsSameVars = compareVariables(variablesInMatchPattern, matchPattern);
}
for (int j = i - 1; j >= 0; j--) {
if (checkSimilarMatchPatterns(matchPatterns.get(j), matchPattern)) {
dlog.warning(matchPattern.pos, DiagnosticWarningCode.MATCH_STMT_PATTERN_UNREACHABLE);
}
}
analyzeNode(matchPattern, data);
}
if (matchGuard != null) {
analyzeNode(matchGuard, data);
}
if (!patternListContainsSameVars) {
dlog.error(matchClause.pos, DiagnosticErrorCode.MATCH_PATTERNS_SHOULD_CONTAIN_SAME_SET_OF_VARIABLES);
}
analyzeNode(matchClause.blockStmt, data);
}
@Override
public void visit(BLangMappingMatchPattern mappingMatchPattern, AnalyzerData data) {
}
@Override
public void visit(BLangFieldMatchPattern fieldMatchPattern, AnalyzerData data) {
}
@Override
public void visit(BLangMatchGuard matchGuard, AnalyzerData data) {
analyzeExpr(matchGuard.expr, data);
}
private void checkSimilarMatchPatternsBetweenClauses(BLangMatchClause firstClause, BLangMatchClause secondClause) {
for (BLangMatchPattern firstMatchPattern : firstClause.matchPatterns) {
for (BLangMatchPattern secondMatchPattern : secondClause.matchPatterns) {
if (checkSimilarMatchPatterns(firstMatchPattern, secondMatchPattern)) {
dlog.warning(secondMatchPattern.pos, DiagnosticWarningCode.MATCH_STMT_PATTERN_UNREACHABLE);
}
}
}
}
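// Two match patterns are treated as similar when the first already matches the values matched
// by the second; callers use this to flag the later pattern as unreachable.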
private boolean checkSimilarMatchPatterns(BLangMatchPattern firstPattern, BLangMatchPattern secondPattern) {
NodeKind firstPatternKind = firstPattern.getKind();
NodeKind secondPatternKind = secondPattern.getKind();
if (firstPatternKind != secondPatternKind) {
if (firstPatternKind == NodeKind.VAR_BINDING_PATTERN_MATCH_PATTERN) {
return checkEmptyListOrMapMatchWithVarBindingPatternMatch(secondPattern,
((BLangVarBindingPatternMatchPattern) firstPattern));
}
if (secondPatternKind == NodeKind.VAR_BINDING_PATTERN_MATCH_PATTERN) {
return checkEmptyListOrMapMatchWithVarBindingPatternMatch(firstPattern,
((BLangVarBindingPatternMatchPattern) secondPattern));
}
return false;
}
switch (firstPatternKind) {
case WILDCARD_MATCH_PATTERN:
case REST_MATCH_PATTERN:
return true;
case CONST_MATCH_PATTERN:
return checkSimilarConstMatchPattern((BLangConstPattern) firstPattern,
(BLangConstPattern) secondPattern);
case VAR_BINDING_PATTERN_MATCH_PATTERN:
return checkSimilarBindingPatterns(
((BLangVarBindingPatternMatchPattern) firstPattern).getBindingPattern(),
((BLangVarBindingPatternMatchPattern) secondPattern).getBindingPattern());
case LIST_MATCH_PATTERN:
return checkSimilarListMatchPattern((BLangListMatchPattern) firstPattern,
(BLangListMatchPattern) secondPattern);
case MAPPING_MATCH_PATTERN:
return checkSimilarMappingMatchPattern((BLangMappingMatchPattern) firstPattern,
(BLangMappingMatchPattern) secondPattern);
case ERROR_MATCH_PATTERN:
return checkSimilarErrorMatchPattern((BLangErrorMatchPattern) firstPattern,
(BLangErrorMatchPattern) secondPattern);
default:
return false;
}
}
private boolean checkEmptyListOrMapMatchWithVarBindingPatternMatch(BLangMatchPattern firstPattern,
BLangVarBindingPatternMatchPattern secondPattern) {
if (firstPattern.getKind() == NodeKind.LIST_MATCH_PATTERN) {
BLangBindingPattern bindingPattern = secondPattern.getBindingPattern();
if (bindingPattern.getKind() != NodeKind.LIST_BINDING_PATTERN) {
return false;
}
BLangListMatchPattern listMatchPattern = (BLangListMatchPattern) firstPattern;
BLangListBindingPattern listBindingPattern = (BLangListBindingPattern) bindingPattern;
return listMatchPattern.matchPatterns.isEmpty() && listBindingPattern.bindingPatterns.isEmpty() &&
listMatchPattern.restMatchPattern == null && listBindingPattern.restBindingPattern == null;
}
if (firstPattern.getKind() == NodeKind.MAPPING_MATCH_PATTERN) {
BLangBindingPattern bindingPattern = secondPattern.getBindingPattern();
if (secondPattern.getBindingPattern().getKind() != NodeKind.MAPPING_BINDING_PATTERN) {
return false;
}
BLangMappingMatchPattern mappingMatchPattern = (BLangMappingMatchPattern) firstPattern;
BLangMappingBindingPattern mappingBindingPattern = (BLangMappingBindingPattern) bindingPattern;
return mappingMatchPattern.fieldMatchPatterns.isEmpty() &&
mappingBindingPattern.fieldBindingPatterns.isEmpty() &&
mappingMatchPattern.restMatchPattern == null && mappingBindingPattern.restBindingPattern == null;
}
return false;
}
private boolean checkSimilarErrorMatchPattern(BLangErrorMatchPattern firstErrorMatchPattern,
BLangErrorMatchPattern secondErrorMatchPattern) {
if (firstErrorMatchPattern == null || secondErrorMatchPattern == null) {
return false;
}
if (!checkSimilarErrorTypeReference(firstErrorMatchPattern.errorTypeReference,
secondErrorMatchPattern.errorTypeReference)) {
return false;
}
if (!checkSimilarErrorMessagePattern(firstErrorMatchPattern.errorMessageMatchPattern,
secondErrorMatchPattern.errorMessageMatchPattern)) {
return false;
}
if (!checkSimilarErrorCauseMatchPattern(firstErrorMatchPattern.errorCauseMatchPattern,
secondErrorMatchPattern.errorCauseMatchPattern)) {
return false;
}
return checkSimilarErrorFieldMatchPatterns(firstErrorMatchPattern.errorFieldMatchPatterns,
secondErrorMatchPattern.errorFieldMatchPatterns);
}
private boolean checkSimilarErrorTypeReference(BLangUserDefinedType firstErrorTypeRef,
BLangUserDefinedType secondErrorTypeRef) {
if (firstErrorTypeRef != null && secondErrorTypeRef != null) {
return firstErrorTypeRef.typeName.value.equals(secondErrorTypeRef.typeName.value);
}
return firstErrorTypeRef == null && secondErrorTypeRef == null;
}
private boolean checkSimilarErrorMessagePattern(BLangErrorMessageMatchPattern firstErrorMsgMatchPattern,
BLangErrorMessageMatchPattern secondErrorMsgMatchPattern) {
if (firstErrorMsgMatchPattern != null && secondErrorMsgMatchPattern != null) {
return checkSimilarSimpleMatchPattern(firstErrorMsgMatchPattern.simpleMatchPattern,
secondErrorMsgMatchPattern.simpleMatchPattern);
}
return firstErrorMsgMatchPattern == null && secondErrorMsgMatchPattern == null;
}
private boolean checkSimilarSimpleMatchPattern(BLangSimpleMatchPattern firstSimpleMatchPattern,
BLangSimpleMatchPattern secondSimpleMatchPattern) {
if (firstSimpleMatchPattern != null && secondSimpleMatchPattern != null) {
if (firstSimpleMatchPattern.varVariableName != null) {
return true;
}
BLangConstPattern firstConstPattern = firstSimpleMatchPattern.constPattern;
BLangConstPattern secondConstPattern = secondSimpleMatchPattern.constPattern;
if (firstConstPattern != null) {
if (secondConstPattern != null) {
return checkSimilarConstMatchPattern(firstConstPattern, secondConstPattern);
}
return false;
}
return secondSimpleMatchPattern.varVariableName == null;
}
return firstSimpleMatchPattern == null && secondSimpleMatchPattern == null;
}
private boolean checkSimilarErrorCauseMatchPattern(BLangErrorCauseMatchPattern firstErrorCauseMatchPattern,
BLangErrorCauseMatchPattern secondErrorCauseMatchPattern) {
if (firstErrorCauseMatchPattern != null && secondErrorCauseMatchPattern != null) {
if (!checkSimilarSimpleMatchPattern(firstErrorCauseMatchPattern.simpleMatchPattern,
secondErrorCauseMatchPattern.simpleMatchPattern)) {
return false;
}
return checkSimilarErrorMatchPattern(firstErrorCauseMatchPattern.errorMatchPattern,
secondErrorCauseMatchPattern.errorMatchPattern);
}
return firstErrorCauseMatchPattern == null && secondErrorCauseMatchPattern == null;
}
private boolean checkSimilarErrorFieldMatchPatterns(BLangErrorFieldMatchPatterns firstErrorFieldMatchPatterns,
BLangErrorFieldMatchPatterns secondErrorFieldMatchPatterns) {
if (firstErrorFieldMatchPatterns == null) {
return true;
}
List<BLangNamedArgMatchPattern> firstNamedArgPatterns = firstErrorFieldMatchPatterns.namedArgMatchPatterns;
int firstNamedArgPatternsSize = firstNamedArgPatterns.size();
if (firstNamedArgPatternsSize == 0) {
return true;
}
if (secondErrorFieldMatchPatterns == null) {
return false;
}
List<BLangNamedArgMatchPattern> secondNamedArgPatterns = secondErrorFieldMatchPatterns.namedArgMatchPatterns;
if (firstNamedArgPatternsSize > secondNamedArgPatterns.size()) {
return false;
}
for (int i = 0; i < firstNamedArgPatternsSize; i++) {
if (!checkSimilarNamedArgMatchPatterns(firstNamedArgPatterns.get(i), secondNamedArgPatterns.get(i))) {
return false;
}
}
return true;
}
private boolean checkSimilarNamedArgMatchPatterns(BLangNamedArgMatchPattern firstNamedArgMatchPattern,
BLangNamedArgMatchPattern secondNamedArgMatchPattern) {
if (firstNamedArgMatchPattern.argName.value.equals(secondNamedArgMatchPattern.argName.value)) {
return checkSimilarMatchPatterns(firstNamedArgMatchPattern.matchPattern,
secondNamedArgMatchPattern.matchPattern);
}
return false;
}
private boolean checkSimilarConstMatchPattern(BLangConstPattern firstConstMatchPattern,
BLangConstPattern secondConstMatchPattern) {
Object firstConstValue = getConstValue(firstConstMatchPattern).keySet().iterator().next();
Object secondConstValue = getConstValue(secondConstMatchPattern).keySet().iterator().next();
BType firstConstType = getConstValue(firstConstMatchPattern).values().iterator().next();
BType secondConstType = getConstValue(secondConstMatchPattern).values().iterator().next();
if (firstConstValue == null || secondConstValue == null) {
return false;
}
if (firstConstValue.equals(secondConstValue)) {
return true;
}
if (firstConstType != null && Types.getReferredType(firstConstType).tag == TypeTags.FINITE) {
firstConstValue = getConstValueFromFiniteType(((BFiniteType) firstConstType));
}
if (secondConstType != null && Types.getReferredType(secondConstType).tag == TypeTags.FINITE) {
secondConstValue = getConstValueFromFiniteType(((BFiniteType) secondConstType));
}
if (firstConstValue == null || secondConstValue == null) {
return false;
}
return firstConstValue.equals(secondConstValue);
}
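// Returns a single-entry map from the constant pattern's value to its type; the type is only
// populated for constant references, and is null for literal values.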
private HashMap<Object, BType> getConstValue(BLangConstPattern constPattern) {
HashMap<Object, BType> constValAndType = new HashMap<>();
switch (constPattern.expr.getKind()) {
case NUMERIC_LITERAL:
constValAndType.put(((BLangNumericLiteral) constPattern.expr).value, null);
break;
case LITERAL:
constValAndType.put(((BLangLiteral) constPattern.expr).value, null);
break;
case SIMPLE_VARIABLE_REF:
constValAndType.put(((BLangSimpleVarRef) constPattern.expr).variableName, constPattern.getBType());
break;
case UNARY_EXPR:
BLangNumericLiteral newNumericLiteral = Types.constructNumericLiteralFromUnaryExpr(
(BLangUnaryExpr) constPattern.expr);
constValAndType.put(newNumericLiteral.value, null);
}
return constValAndType;
}
private Object getConstValueFromFiniteType(BFiniteType type) {
if (type.getValueSpace().size() == 1) {
BLangExpression expr = type.getValueSpace().iterator().next();
switch (expr.getKind()) {
case NUMERIC_LITERAL:
return ((BLangNumericLiteral) expr).value;
case LITERAL:
return ((BLangLiteral) expr).value;
}
}
return null;
}
private boolean checkSimilarListMatchPattern(BLangListMatchPattern firstListMatchPattern,
BLangListMatchPattern secondListMatchPattern) {
List<BLangMatchPattern> firstMatchPatterns = firstListMatchPattern.matchPatterns;
List<BLangMatchPattern> secondMatchPatterns = secondListMatchPattern.matchPatterns;
int firstPatternsSize = firstMatchPatterns.size();
int secondPatternsSize = secondMatchPatterns.size();
if (firstPatternsSize <= secondPatternsSize) {
for (int i = 0; i < firstPatternsSize; i++) {
if (!checkSimilarMatchPatterns(firstMatchPatterns.get(i), secondMatchPatterns.get(i))) {
return false;
}
}
if (firstPatternsSize == secondPatternsSize) {
if (firstListMatchPattern.restMatchPattern != null) {
return true;
}
return secondListMatchPattern.restMatchPattern == null;
}
return firstListMatchPattern.restMatchPattern != null;
}
return false;
}
private boolean checkSimilarMappingMatchPattern(BLangMappingMatchPattern firstMappingMatchPattern,
BLangMappingMatchPattern secondMappingMatchPattern) {
List<BLangFieldMatchPattern> firstFieldMatchPatterns = firstMappingMatchPattern.fieldMatchPatterns;
List<BLangFieldMatchPattern> secondFieldMatchPatterns = secondMappingMatchPattern.fieldMatchPatterns;
return checkSimilarFieldMatchPatterns(firstFieldMatchPatterns, secondFieldMatchPatterns);
}
private boolean checkSimilarFieldMatchPatterns(List<BLangFieldMatchPattern> firstFieldMatchPatterns,
List<BLangFieldMatchPattern> secondFieldMatchPatterns) {
for (BLangFieldMatchPattern firstFieldMatchPattern : firstFieldMatchPatterns) {
boolean isSamePattern = false;
for (BLangFieldMatchPattern secondFieldMatchPattern : secondFieldMatchPatterns) {
if (checkSimilarFieldMatchPattern(firstFieldMatchPattern, secondFieldMatchPattern)) {
isSamePattern = true;
break;
}
}
if (!isSamePattern) {
return false;
}
}
return true;
}
private boolean checkSimilarFieldMatchPattern(BLangFieldMatchPattern firstFieldMatchPattern,
BLangFieldMatchPattern secondFieldMatchPattern) {
return firstFieldMatchPattern.fieldName.value.equals(secondFieldMatchPattern.fieldName.value) &&
checkSimilarMatchPatterns(firstFieldMatchPattern.matchPattern, secondFieldMatchPattern.matchPattern);
}
private boolean checkSimilarBindingPatterns(BLangBindingPattern firstBindingPattern,
BLangBindingPattern secondBindingPattern) {
NodeKind firstBindingPatternKind = firstBindingPattern.getKind();
NodeKind secondBindingPatternKind = secondBindingPattern.getKind();
if (firstBindingPatternKind != secondBindingPatternKind) {
return false;
}
switch (firstBindingPatternKind) {
case WILDCARD_BINDING_PATTERN:
case REST_BINDING_PATTERN:
case CAPTURE_BINDING_PATTERN:
return true;
case LIST_BINDING_PATTERN:
return checkSimilarListBindingPatterns((BLangListBindingPattern) firstBindingPattern,
(BLangListBindingPattern) secondBindingPattern);
case MAPPING_BINDING_PATTERN:
return checkSimilarMappingBindingPattern((BLangMappingBindingPattern) firstBindingPattern,
(BLangMappingBindingPattern) secondBindingPattern);
case ERROR_BINDING_PATTERN:
return checkSimilarErrorBindingPatterns((BLangErrorBindingPattern) firstBindingPattern,
(BLangErrorBindingPattern) secondBindingPattern);
default:
return false;
}
}
private boolean checkSimilarMappingBindingPattern(BLangMappingBindingPattern firstMappingBindingPattern,
BLangMappingBindingPattern secondMappingBindingPattern) {
List<BLangFieldBindingPattern> firstFieldBindingPatterns = firstMappingBindingPattern.fieldBindingPatterns;
List<BLangFieldBindingPattern> secondFieldBindingPatterns = secondMappingBindingPattern.fieldBindingPatterns;
return checkSimilarFieldBindingPatterns(firstFieldBindingPatterns, secondFieldBindingPatterns);
}
private boolean checkSimilarFieldBindingPatterns(List<BLangFieldBindingPattern> firstFieldBindingPatterns,
List<BLangFieldBindingPattern> secondFieldBindingPatterns) {
for (BLangFieldBindingPattern firstFieldBindingPattern : firstFieldBindingPatterns) {
boolean isSamePattern = false;
for (BLangFieldBindingPattern secondFieldBindingPattern : secondFieldBindingPatterns) {
if (checkSimilarFieldBindingPattern(firstFieldBindingPattern, secondFieldBindingPattern)) {
isSamePattern = true;
break;
}
}
if (!isSamePattern) {
return false;
}
}
return true;
}
private boolean checkSimilarFieldBindingPattern(BLangFieldBindingPattern firstFieldBindingPattern,
BLangFieldBindingPattern secondFieldBindingPattern) {
boolean hasSameFieldNames = firstFieldBindingPattern.fieldName.value.
equals(secondFieldBindingPattern.fieldName.value);
if (firstFieldBindingPattern.bindingPattern.getKind() == secondFieldBindingPattern.bindingPattern.getKind()) {
return hasSameFieldNames && checkSimilarBindingPatterns(firstFieldBindingPattern.bindingPattern,
secondFieldBindingPattern.bindingPattern);
}
return hasSameFieldNames && firstFieldBindingPattern.bindingPattern.getKind() ==
NodeKind.CAPTURE_BINDING_PATTERN;
}
private boolean checkSimilarListBindingPatterns(BLangListBindingPattern firstBindingPattern,
BLangListBindingPattern secondBindingPattern) {
List<BLangBindingPattern> firstPatterns = firstBindingPattern.bindingPatterns;
List<BLangBindingPattern> secondPatterns = secondBindingPattern.bindingPatterns;
int firstPatternsSize = firstPatterns.size();
int secondPatternsSize = secondPatterns.size();
if (firstPatternsSize <= secondPatternsSize) {
for (int i = 0; i < firstPatternsSize; i++) {
if (!checkSimilarBindingPatterns(firstPatterns.get(i), secondPatterns.get(i))) {
return firstPatterns.get(i).getKind() == NodeKind.CAPTURE_BINDING_PATTERN;
}
}
if (firstPatternsSize == secondPatternsSize) {
if (firstBindingPattern.restBindingPattern != null) {
return true;
}
return secondBindingPattern.restBindingPattern == null;
}
return secondBindingPattern.restBindingPattern != null;
}
return false;
}
private boolean checkSimilarErrorBindingPatterns(BLangErrorBindingPattern firstErrorBindingPattern,
BLangErrorBindingPattern secondErrorBindingPattern) {
if (firstErrorBindingPattern == null || secondErrorBindingPattern == null) {
return false;
}
if (!checkSimilarErrorTypeReference(firstErrorBindingPattern.errorTypeReference,
secondErrorBindingPattern.errorTypeReference)) {
return false;
}
if (!checkSimilarErrorMessageBindingPattern(firstErrorBindingPattern.errorMessageBindingPattern,
secondErrorBindingPattern.errorMessageBindingPattern)) {
return false;
}
if (!checkSimilarErrorCauseBindingPattern(firstErrorBindingPattern.errorCauseBindingPattern,
secondErrorBindingPattern.errorCauseBindingPattern)) {
return false;
}
return checkSimilarErrorFieldBindingPatterns(firstErrorBindingPattern.errorFieldBindingPatterns,
secondErrorBindingPattern.errorFieldBindingPatterns);
}
private boolean checkSimilarErrorMessageBindingPattern(BLangErrorMessageBindingPattern firstErrorMsgBindingPattern,
BLangErrorMessageBindingPattern secondErrorMsgBindingPattern) {
if (firstErrorMsgBindingPattern != null && secondErrorMsgBindingPattern != null) {
return checkSimilarSimpleBindingPattern(firstErrorMsgBindingPattern.simpleBindingPattern,
secondErrorMsgBindingPattern.simpleBindingPattern);
}
return firstErrorMsgBindingPattern == null && secondErrorMsgBindingPattern == null;
}
private boolean checkSimilarSimpleBindingPattern(BLangSimpleBindingPattern firstSimpleBindingPattern,
BLangSimpleBindingPattern secondSimpleBindingPattern) {
if (firstSimpleBindingPattern != null && secondSimpleBindingPattern != null) {
BLangBindingPattern firstCaptureBindingPattern = firstSimpleBindingPattern.captureBindingPattern;
BLangBindingPattern secondCaptureBindingPattern = secondSimpleBindingPattern.captureBindingPattern;
if (firstCaptureBindingPattern != null && secondCaptureBindingPattern != null) {
return checkSimilarBindingPatterns(firstCaptureBindingPattern, secondCaptureBindingPattern);
}
return firstSimpleBindingPattern.wildCardBindingPattern != null;
}
return firstSimpleBindingPattern == null && secondSimpleBindingPattern == null;
}
private boolean checkSimilarErrorCauseBindingPattern(BLangErrorCauseBindingPattern firstErrorCauseBindingPattern,
BLangErrorCauseBindingPattern secondErrorCauseBindingPattern) {
if (firstErrorCauseBindingPattern != null && secondErrorCauseBindingPattern != null) {
if (!checkSimilarSimpleBindingPattern(firstErrorCauseBindingPattern.simpleBindingPattern,
secondErrorCauseBindingPattern.simpleBindingPattern)) {
return false;
}
return checkSimilarErrorBindingPatterns(firstErrorCauseBindingPattern.errorBindingPattern,
secondErrorCauseBindingPattern.errorBindingPattern);
}
return firstErrorCauseBindingPattern == null && secondErrorCauseBindingPattern == null;
}
private boolean checkSimilarErrorFieldBindingPatterns(
BLangErrorFieldBindingPatterns firstErrorFieldBindingPatterns,
BLangErrorFieldBindingPatterns secondErrorFieldBindingPatterns) {
if (firstErrorFieldBindingPatterns == null) {
return true;
}
List<BLangNamedArgBindingPattern> firstNamedArgPatterns =
firstErrorFieldBindingPatterns.namedArgBindingPatterns;
int firstNamedArgPatternsSize = firstNamedArgPatterns.size();
if (firstNamedArgPatternsSize == 0) {
return true;
}
if (secondErrorFieldBindingPatterns == null) {
return false;
}
List<BLangNamedArgBindingPattern> secondNamedArgPatterns =
secondErrorFieldBindingPatterns.namedArgBindingPatterns;
if (firstNamedArgPatternsSize > secondNamedArgPatterns.size()) {
return false;
}
for (int i = 0; i < firstNamedArgPatternsSize; i++) {
if (!checkSimilarNamedArgBindingPatterns(firstNamedArgPatterns.get(i), secondNamedArgPatterns.get(i))) {
return false;
}
}
return true;
}
private boolean checkSimilarNamedArgBindingPatterns(BLangNamedArgBindingPattern firstNamedArgBindingPattern,
BLangNamedArgBindingPattern secondNamedArgBindingPattern) {
if (firstNamedArgBindingPattern.argName.value.equals(secondNamedArgBindingPattern.argName.value)) {
return checkSimilarBindingPatterns(firstNamedArgBindingPattern.bindingPattern,
secondNamedArgBindingPattern.bindingPattern);
}
return false;
}
private boolean checkSimilarMatchGuard(BLangMatchGuard firstMatchGuard, BLangMatchGuard secondMatchGuard) {
if (firstMatchGuard == null && secondMatchGuard == null) {
return true;
}
if (firstMatchGuard == null || secondMatchGuard == null) {
return false;
}
if (firstMatchGuard.expr.getKind() == NodeKind.TYPE_TEST_EXPR &&
secondMatchGuard.expr.getKind() == NodeKind.TYPE_TEST_EXPR &&
((BLangTypeTestExpr) firstMatchGuard.expr).expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF &&
((BLangTypeTestExpr) secondMatchGuard.expr).expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangTypeTestExpr firstTypeTest = (BLangTypeTestExpr) firstMatchGuard.expr;
BLangTypeTestExpr secondTypeTest = (BLangTypeTestExpr) secondMatchGuard.expr;
return ((BLangSimpleVarRef) firstTypeTest.expr).variableName.toString().equals(
((BLangSimpleVarRef) secondTypeTest.expr).variableName.toString()) &&
types.isAssignable(firstTypeTest.typeNode.getBType(),
secondTypeTest.typeNode.getBType());
}
return false;
}
private boolean compareVariables(Map<String, BVarSymbol> varsInPreviousMatchPattern,
BLangMatchPattern matchPattern) {
Map<String, BVarSymbol> varsInCurrentMatchPattern = matchPattern.declaredVars;
if (varsInPreviousMatchPattern.size() == 0) {
varsInPreviousMatchPattern.putAll(varsInCurrentMatchPattern);
return true;
}
if (varsInPreviousMatchPattern.size() != varsInCurrentMatchPattern.size()) {
return false;
}
for (String identifier : varsInPreviousMatchPattern.keySet()) {
if (!varsInCurrentMatchPattern.containsKey(identifier)) {
return false;
}
}
return true;
}
@Override
public void visit(BLangWildCardMatchPattern wildCardMatchPattern, AnalyzerData data) {
wildCardMatchPattern.isLastPattern =
wildCardMatchPattern.matchExpr != null && types.isAssignable(wildCardMatchPattern.matchExpr.getBType(),
symTable.anyType);
}
@Override
public void visit(BLangConstPattern constMatchPattern, AnalyzerData data) {
analyzeNode(constMatchPattern.expr, data);
}
@Override
public void visit(BLangVarBindingPatternMatchPattern varBindingPattern, AnalyzerData data) {
BLangBindingPattern bindingPattern = varBindingPattern.getBindingPattern();
analyzeNode(bindingPattern, data);
switch (bindingPattern.getKind()) {
case WILDCARD_BINDING_PATTERN:
varBindingPattern.isLastPattern =
varBindingPattern.matchExpr != null && types.isAssignable(
varBindingPattern.matchExpr.getBType(),
symTable.anyType);
return;
case CAPTURE_BINDING_PATTERN:
varBindingPattern.isLastPattern =
varBindingPattern.matchExpr != null && !varBindingPattern.matchGuardIsAvailable;
return;
case LIST_BINDING_PATTERN:
if (varBindingPattern.matchExpr == null) {
return;
}
varBindingPattern.isLastPattern = types.isSameType(varBindingPattern.matchExpr.getBType(),
varBindingPattern.getBType()) || types.isAssignable(
varBindingPattern.matchExpr.getBType(),
varBindingPattern.getBType());
}
}
@Override
public void visit(BLangMappingBindingPattern mappingBindingPattern, AnalyzerData data) {
}
@Override
public void visit(BLangWildCardBindingPattern wildCardBindingPattern, AnalyzerData data) {
}
@Override
public void visit(BLangListMatchPattern listMatchPattern, AnalyzerData data) {
if (listMatchPattern.matchExpr == null) {
return;
}
listMatchPattern.isLastPattern = types.isAssignable(listMatchPattern.matchExpr.getBType(),
listMatchPattern.getBType()) && !isConstMatchPatternExist(listMatchPattern);
}
private boolean isConstMatchPatternExist(BLangMatchPattern matchPattern) {
switch (matchPattern.getKind()) {
case CONST_MATCH_PATTERN:
return true;
case LIST_MATCH_PATTERN:
for (BLangMatchPattern memberMatchPattern : ((BLangListMatchPattern) matchPattern).matchPatterns) {
if (isConstMatchPatternExist(memberMatchPattern)) {
return true;
}
}
return false;
case MAPPING_MATCH_PATTERN:
for (BLangFieldMatchPattern fieldMatchPattern :
((BLangMappingMatchPattern) matchPattern).fieldMatchPatterns) {
if (isConstMatchPatternExist(fieldMatchPattern.matchPattern)) {
return true;
}
}
return false;
default:
return false;
}
}
@Override
public void visit(BLangCaptureBindingPattern captureBindingPattern, AnalyzerData data) {
}
@Override
public void visit(BLangListBindingPattern listBindingPattern, AnalyzerData data) {
}
@Override
public void visit(BLangErrorMatchPattern errorMatchPattern, AnalyzerData data) {
}
@Override
public void visit(BLangErrorBindingPattern errorBindingPattern, AnalyzerData data) {
}
@Override
public void visit(BLangForeach foreach, AnalyzerData data) {
data.loopWithinTransactionCheckStack.push(true);
data.errorTypes.push(new LinkedHashSet<>());
boolean failureHandled = data.failureHandled;
if (!data.failureHandled) {
data.failureHandled = foreach.onFailClause != null;
}
data.loopCount++;
BLangBlockStmt body = foreach.body;
data.env = SymbolEnv.createLoopEnv(foreach, data.env);
analyzeNode(body, data);
data.loopCount--;
data.failureHandled = failureHandled;
data.loopWithinTransactionCheckStack.pop();
analyzeExpr(foreach.collection, data);
body.failureBreakMode = foreach.onFailClause != null ?
BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK : BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
analyzeOnFailClause(foreach.onFailClause, data);
data.errorTypes.pop();
}
@Override
public void visit(BLangWhile whileNode, AnalyzerData data) {
data.loopWithinTransactionCheckStack.push(true);
data.errorTypes.push(new LinkedHashSet<>());
boolean failureHandled = data.failureHandled;
if (!data.failureHandled) {
data.failureHandled = whileNode.onFailClause != null;
}
data.loopCount++;
BLangBlockStmt body = whileNode.body;
data.env = SymbolEnv.createLoopEnv(whileNode, data.env);
analyzeNode(body, data);
data.loopCount--;
data.failureHandled = failureHandled;
data.loopWithinTransactionCheckStack.pop();
analyzeExpr(whileNode.expr, data);
analyzeOnFailClause(whileNode.onFailClause, data);
data.errorTypes.pop();
}
@Override
public void visit(BLangDo doNode, AnalyzerData data) {
data.errorTypes.push(new LinkedHashSet<>());
boolean failureHandled = data.failureHandled;
if (!data.failureHandled) {
data.failureHandled = doNode.onFailClause != null;
}
analyzeNode(doNode.body, data);
data.failureHandled = failureHandled;
doNode.body.failureBreakMode = doNode.onFailClause != null ?
BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK : BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
analyzeOnFailClause(doNode.onFailClause, data);
data.errorTypes.pop();
}
@Override
public void visit(BLangFail failNode, AnalyzerData data) {
data.failVisited = true;
analyzeExpr(failNode.expr, data);
if (data.env.scope.owner.getKind() == SymbolKind.PACKAGE) {
return;
}
typeChecker.checkExpr(failNode.expr, data.env);
if (!data.errorTypes.empty()) {
data.errorTypes.peek().add(getErrorTypes(failNode.expr.getBType()));
}
if (!data.failureHandled) {
BType exprType = data.env.enclInvokable.getReturnTypeNode().getBType();
data.returnTypes.peek().add(exprType);
if (!types.isAssignable(getErrorTypes(failNode.expr.getBType()), exprType)) {
dlog.error(failNode.pos, DiagnosticErrorCode.FAIL_EXPR_NO_MATCHING_ERROR_RETURN_IN_ENCL_INVOKABLE);
}
}
}
@Override
public void visit(BLangLock lockNode, AnalyzerData data) {
data.errorTypes.push(new LinkedHashSet<>());
boolean failureHandled = data.failureHandled;
if (!data.failureHandled) {
data.failureHandled = lockNode.onFailClause != null;
}
boolean previousWithinLockBlock = data.withinLockBlock;
data.withinLockBlock = true;
lockNode.body.stmts.forEach(e -> analyzeNode(e, data));
data.withinLockBlock = previousWithinLockBlock;
data.failureHandled = failureHandled;
lockNode.body.failureBreakMode = lockNode.onFailClause != null ?
BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK : BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
analyzeOnFailClause(lockNode.onFailClause, data);
data.errorTypes.pop();
}
@Override
public void visit(BLangContinue continueNode, AnalyzerData data) {
if (data.loopCount == 0) {
this.dlog.error(continueNode.pos, DiagnosticErrorCode.CONTINUE_CANNOT_BE_OUTSIDE_LOOP);
return;
}
if (checkNextBreakValidityInTransaction(data)) {
this.dlog.error(continueNode.pos, DiagnosticErrorCode.CONTINUE_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
return;
}
if (data.loopAlterNotAllowed) {
this.dlog.error(continueNode.pos, DiagnosticErrorCode.CONTINUE_NOT_ALLOWED);
}
}
@Override
public void visit(BLangImportPackage importPkgNode, AnalyzerData data) {
BPackageSymbol pkgSymbol = importPkgNode.symbol;
SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgSymbol);
if (pkgEnv == null) {
return;
}
analyzeNode(pkgEnv.node, data);
}
@Override
public void visit(BLangXMLNS xmlnsNode, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangClientDeclaration node, AnalyzerData data) {
}
@Override
public void visit(BLangService serviceNode, AnalyzerData data) {
}
private void analyzeExportableTypeRef(BSymbol owner, BTypeSymbol symbol, boolean inFuncSignature,
Location pos) {
if (!inFuncSignature && Symbols.isFlagOn(owner.flags, Flags.ANONYMOUS)) {
return;
}
if (Symbols.isPublic(owner)) {
HashSet<BTypeSymbol> visitedSymbols = new HashSet<>();
checkForExportableType(symbol, pos, visitedSymbols);
}
}
private void checkForExportableType(BTypeSymbol symbol, Location pos, HashSet<BTypeSymbol> visitedSymbols) {
if (symbol == null || symbol.type == null || Symbols.isFlagOn(symbol.flags, Flags.TYPE_PARAM)) {
return;
}
if (!visitedSymbols.add(symbol)) {
return;
}
BType symbolType = symbol.type;
switch (symbolType.tag) {
case TypeTags.ARRAY:
checkForExportableType(((BArrayType) symbolType).eType.tsymbol, pos, visitedSymbols);
return;
case TypeTags.TUPLE:
BTupleType tupleType = (BTupleType) symbolType;
tupleType.tupleTypes.forEach(t -> checkForExportableType(t.tsymbol, pos, visitedSymbols));
if (tupleType.restType != null) {
checkForExportableType(tupleType.restType.tsymbol, pos, visitedSymbols);
}
return;
case TypeTags.MAP:
checkForExportableType(((BMapType) symbolType).constraint.tsymbol, pos, visitedSymbols);
return;
case TypeTags.RECORD:
if (Symbols.isFlagOn(symbol.flags, Flags.ANONYMOUS)) {
BRecordType recordType = (BRecordType) symbolType;
recordType.fields.values().forEach(f -> checkForExportableType(f.type.tsymbol, pos,
visitedSymbols));
if (recordType.restFieldType != null) {
checkForExportableType(recordType.restFieldType.tsymbol, pos, visitedSymbols);
}
return;
}
break;
case TypeTags.TABLE:
BTableType tableType = (BTableType) symbolType;
if (tableType.constraint != null) {
checkForExportableType(tableType.constraint.tsymbol, pos, visitedSymbols);
}
return;
case TypeTags.STREAM:
BStreamType streamType = (BStreamType) symbolType;
if (streamType.constraint != null) {
checkForExportableType(streamType.constraint.tsymbol, pos, visitedSymbols);
}
return;
case TypeTags.INVOKABLE:
BInvokableType invokableType = (BInvokableType) symbolType;
if (Symbols.isFlagOn(invokableType.flags, Flags.ANY_FUNCTION)) {
return;
}
if (invokableType.paramTypes != null) {
for (BType paramType : invokableType.paramTypes) {
checkForExportableType(paramType.tsymbol, pos, visitedSymbols);
}
}
if (invokableType.restType != null) {
checkForExportableType(invokableType.restType.tsymbol, pos, visitedSymbols);
}
checkForExportableType(invokableType.retType.tsymbol, pos, visitedSymbols);
return;
case TypeTags.PARAMETERIZED_TYPE:
BTypeSymbol parameterizedType = ((BParameterizedType) symbolType).paramValueType.tsymbol;
checkForExportableType(parameterizedType, pos, visitedSymbols);
return;
case TypeTags.ERROR:
if (Symbols.isFlagOn(symbol.flags, Flags.ANONYMOUS)) {
checkForExportableType((((BErrorType) symbolType).detailType.tsymbol), pos, visitedSymbols);
return;
}
break;
case TypeTags.TYPEREFDESC:
symbolType = Types.getReferredType(symbolType);
checkForExportableType(symbolType.tsymbol, pos, visitedSymbols);
return;
}
if (!Symbols.isPublic(symbol)) {
dlog.warning(pos, DiagnosticWarningCode.ATTEMPT_EXPOSE_NON_PUBLIC_SYMBOL, symbol.name);
}
}
@Override
public void visit(BLangLetExpression letExpression, AnalyzerData data) {
int ownerSymTag = data.env.scope.owner.tag;
if ((ownerSymTag & SymTag.RECORD) == SymTag.RECORD) {
dlog.error(letExpression.pos, DiagnosticErrorCode.LET_EXPRESSION_NOT_YET_SUPPORTED_RECORD_FIELD);
} else if ((ownerSymTag & SymTag.OBJECT) == SymTag.OBJECT) {
dlog.error(letExpression.pos, DiagnosticErrorCode.LET_EXPRESSION_NOT_YET_SUPPORTED_OBJECT_FIELD);
}
data.env = letExpression.env;
for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
analyzeNode((BLangNode) letVariable.definitionNode, data);
}
analyzeExpr(letExpression.expr, data);
}
@Override
public void visit(BLangSimpleVariable varNode, AnalyzerData data) {
analyzeTypeNode(varNode.typeNode, data);
analyzeExpr(varNode.expr, data);
if (Objects.isNull(varNode.symbol)) {
return;
}
if (!Symbols.isPublic(varNode.symbol)) {
return;
}
int ownerSymTag = data.env.scope.owner.tag;
if ((ownerSymTag & SymTag.RECORD) == SymTag.RECORD || (ownerSymTag & SymTag.OBJECT) == SymTag.OBJECT) {
analyzeExportableTypeRef(data.env.scope.owner, varNode.getBType().tsymbol, false, varNode.pos);
} else if ((ownerSymTag & SymTag.INVOKABLE) != SymTag.INVOKABLE) {
analyzeExportableTypeRef(varNode.symbol, varNode.getBType().tsymbol, false, varNode.pos);
}
varNode.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, data));
}
private boolean isValidInferredArray(BLangNode node) {
switch (node.getKind()) {
case INTERSECTION_TYPE_NODE:
case UNION_TYPE_NODE:
return isValidInferredArray(node.parent);
case VARIABLE:
BLangSimpleVariable varNode = (BLangSimpleVariable) node;
BLangExpression expr = varNode.expr;
return expr != null && isValidContextForInferredArray(node.parent) &&
isValidVariableForInferredArray(expr);
default:
return false;
}
}
private boolean isValidContextForInferredArray(BLangNode node) {
switch (node.getKind()) {
case PACKAGE:
case EXPR_FUNCTION_BODY:
case BLOCK_FUNCTION_BODY:
case BLOCK:
return true;
case VARIABLE_DEF:
return isValidContextForInferredArray(node.parent);
default:
return false;
}
}
private boolean isValidVariableForInferredArray(BLangNode node) {
switch (node.getKind()) {
case LITERAL:
if (node.getBType().tag == TypeTags.ARRAY) {
return true;
}
break;
case LIST_CONSTRUCTOR_EXPR:
return true;
case GROUP_EXPR:
return isValidVariableForInferredArray(((BLangGroupExpr) node).expression);
}
return false;
}
@Override
public void visit(BLangTupleVariable bLangTupleVariable, AnalyzerData data) {
if (bLangTupleVariable.typeNode != null) {
analyzeNode(bLangTupleVariable.typeNode, data);
}
analyzeExpr(bLangTupleVariable.expr, data);
}
@Override
public void visit(BLangRecordVariable bLangRecordVariable, AnalyzerData data) {
if (bLangRecordVariable.typeNode != null) {
analyzeNode(bLangRecordVariable.typeNode, data);
}
analyzeExpr(bLangRecordVariable.expr, data);
}
@Override
public void visit(BLangErrorVariable bLangErrorVariable, AnalyzerData data) {
if (bLangErrorVariable.typeNode != null) {
analyzeNode(bLangErrorVariable.typeNode, data);
}
analyzeExpr(bLangErrorVariable.expr, data);
}
@Override
public void visit(BLangIdentifier identifierNode, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangAnnotation annotationNode, AnalyzerData data) {
annotationNode.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, data));
}
@Override
public void visit(BLangAnnotationAttachment annAttachmentNode, AnalyzerData data) {
analyzeExpr(annAttachmentNode.expr, data);
BAnnotationSymbol annotationSymbol = annAttachmentNode.annotationSymbol;
if (annotationSymbol != null && Symbols.isFlagOn(annotationSymbol.flags, Flags.DEPRECATED)) {
logDeprecatedWaring(annAttachmentNode.annotationName.toString(), annotationSymbol, annAttachmentNode.pos);
}
}
@Override
public void visit(BLangSimpleVariableDef varDefNode, AnalyzerData data) {
analyzeNode(varDefNode.var, data);
}
@Override
public void visit(BLangCompoundAssignment compoundAssignment, AnalyzerData data) {
BLangValueExpression varRef = compoundAssignment.varRef;
analyzeExpr(varRef, data);
analyzeExpr(compoundAssignment.expr, data);
}
@Override
public void visit(BLangAssignment assignNode, AnalyzerData data) {
BLangExpression varRef = assignNode.varRef;
analyzeExpr(varRef, data);
analyzeExpr(assignNode.expr, data);
}
@Override
public void visit(BLangRecordDestructure stmt, AnalyzerData data) {
List<BLangExpression> varRefs = getVarRefs(stmt.varRef);
this.checkDuplicateVarRefs(varRefs);
analyzeExpr(stmt.varRef, data);
analyzeExpr(stmt.expr, data);
}
@Override
public void visit(BLangErrorDestructure stmt, AnalyzerData data) {
List<BLangExpression> varRefs = getVarRefs(stmt.varRef);
this.checkDuplicateVarRefs(varRefs);
analyzeExpr(stmt.varRef, data);
analyzeExpr(stmt.expr, data);
}
@Override
public void visit(BLangTupleDestructure stmt, AnalyzerData data) {
List<BLangExpression> varRefs = getVarRefs(stmt.varRef);
this.checkDuplicateVarRefs(varRefs);
analyzeExpr(stmt.varRef, data);
analyzeExpr(stmt.expr, data);
}
private void checkDuplicateVarRefs(List<BLangExpression> varRefs) {
checkDuplicateVarRefs(varRefs, new HashSet<>());
}
private List<BLangExpression> getVarRefs(BLangRecordVarRef varRef) {
List<BLangExpression> varRefs = varRef.recordRefFields.stream()
.map(e -> e.variableReference).collect(Collectors.toList());
if (varRef.restParam != null) {
varRefs.add(varRef.restParam);
}
return varRefs;
}
private List<BLangExpression> getVarRefs(BLangErrorVarRef varRef) {
List<BLangExpression> varRefs = new ArrayList<>();
if (varRef.message != null) {
varRefs.add(varRef.message);
}
if (varRef.cause != null) {
varRefs.add(varRef.cause);
}
varRefs.addAll(varRef.detail.stream().map(e -> e.expr).collect(Collectors.toList()));
if (varRef.restVar != null) {
varRefs.add(varRef.restVar);
}
return varRefs;
}
private List<BLangExpression> getVarRefs(BLangTupleVarRef varRef) {
List<BLangExpression> varRefs = new ArrayList<>(varRef.expressions);
if (varRef.restParam != null) {
varRefs.add(varRef.restParam);
}
return varRefs;
}
@Override
public void visit(BLangBreak breakNode, AnalyzerData data) {
if (data.loopCount == 0) {
this.dlog.error(breakNode.pos, DiagnosticErrorCode.BREAK_CANNOT_BE_OUTSIDE_LOOP);
return;
}
if (checkNextBreakValidityInTransaction(data)) {
this.dlog.error(breakNode.pos, DiagnosticErrorCode.BREAK_CANNOT_BE_USED_TO_EXIT_TRANSACTION);
return;
}
if (data.loopAlterNotAllowed) {
this.dlog.error(breakNode.pos, DiagnosticErrorCode.BREAK_NOT_ALLOWED);
}
}
@Override
public void visit(BLangPanic panicNode, AnalyzerData data) {
analyzeExpr(panicNode.expr, data);
}
@Override
public void visit(BLangXMLNSStatement xmlnsStmtNode, AnalyzerData data) {
}
@Override
public void visit(BLangClientDeclarationStatement clientDeclarationStatement, AnalyzerData data) {
analyzeNode(clientDeclarationStatement.clientDeclaration, data);
}
@Override
public void visit(BLangExpressionStmt exprStmtNode, AnalyzerData data) {
BLangExpression expr = exprStmtNode.expr;
analyzeExpr(expr, data);
}
private boolean isTopLevel(SymbolEnv env) {
return env.enclInvokable.body == env.node;
}
private boolean isInWorker(SymbolEnv env) {
return env.enclInvokable.flagSet.contains(Flag.WORKER);
}
private boolean isCommunicationAllowedLocation(SymbolEnv env) {
return isTopLevel(env);
}
private boolean isDefaultWorkerCommunication(String workerIdentifier) {
return workerIdentifier.equals(DEFAULT_WORKER_NAME);
}
private boolean workerExists(BType type, String workerName, SymbolEnv env) {
if (isDefaultWorkerCommunication(workerName) && isInWorker(env)) {
return true;
}
if (type == symTable.semanticError) {
return false;
}
BType refType = Types.getReferredType(type);
return refType.tag == TypeTags.FUTURE && ((BFutureType) refType).workerDerivative;
}
@Override
public void visit(BLangWorkerSend workerSendNode, AnalyzerData data) {
BSymbol receiver =
symResolver.lookupSymbolInMainSpace(data.env, names.fromIdNode(workerSendNode.workerIdentifier));
if ((receiver.tag & SymTag.VARIABLE) != SymTag.VARIABLE) {
receiver = symTable.notFoundSymbol;
}
verifyPeerCommunication(workerSendNode.pos, receiver, workerSendNode.workerIdentifier.value, data.env);
WorkerActionSystem was = data.workerActionSystemStack.peek();
BType type = workerSendNode.expr.getBType();
if (type == symTable.semanticError) {
was.hasErrors = true;
} else if (workerSendNode.expr instanceof ActionNode) {
this.dlog.error(workerSendNode.expr.pos, DiagnosticErrorCode.INVALID_SEND_EXPR);
} else if (!types.isAssignable(type, symTable.cloneableType)) {
this.dlog.error(workerSendNode.pos, DiagnosticErrorCode.INVALID_TYPE_FOR_SEND, type);
}
String workerName = workerSendNode.workerIdentifier.getValue();
if (data.withinQuery || (!isCommunicationAllowedLocation(data.env) && !data.inInternallyDefinedBlockStmt)) {
this.dlog.error(workerSendNode.pos, DiagnosticErrorCode.UNSUPPORTED_WORKER_SEND_POSITION);
was.hasErrors = true;
}
if (!this.workerExists(workerSendNode.getBType(), workerName, data.env)
|| (!isWorkerFromFunction(data.env, names.fromString(workerName)) && !workerName.equals("function"))) {
this.dlog.error(workerSendNode.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
was.hasErrors = true;
}
workerSendNode.setBType(
createAccumulatedErrorTypeForMatchingReceive(workerSendNode.pos, workerSendNode.expr.getBType(), data));
was.addWorkerAction(workerSendNode);
analyzeExpr(workerSendNode.expr, data);
validateActionParentNode(workerSendNode.pos, workerSendNode.expr);
}
private BType createAccumulatedErrorTypeForMatchingReceive(Location pos, BType exprType, AnalyzerData data) {
Set<BType> returnTypesUpToNow = data.returnTypes.peek();
        LinkedHashSet<BType> returnTypeAndSendType = new LinkedHashSet<>();
for (BType returnType : returnTypesUpToNow) {
if (onlyContainErrors(returnType)) {
returnTypeAndSendType.add(returnType);
} else {
this.dlog.error(pos, DiagnosticErrorCode.WORKER_SEND_AFTER_RETURN);
}
}
returnTypeAndSendType.add(exprType);
if (returnTypeAndSendType.size() > 1) {
return BUnionType.create(null, returnTypeAndSendType);
} else {
return exprType;
}
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr, AnalyzerData data) {
BSymbol receiver =
symResolver.lookupSymbolInMainSpace(data.env, names.fromIdNode(syncSendExpr.workerIdentifier));
if ((receiver.tag & SymTag.VARIABLE) != SymTag.VARIABLE) {
receiver = symTable.notFoundSymbol;
}
verifyPeerCommunication(syncSendExpr.pos, receiver, syncSendExpr.workerIdentifier.value, data.env);
validateActionParentNode(syncSendExpr.pos, syncSendExpr);
String workerName = syncSendExpr.workerIdentifier.getValue();
WorkerActionSystem was = data.workerActionSystemStack.peek();
if (data.withinQuery || (!isCommunicationAllowedLocation(data.env) && !data.inInternallyDefinedBlockStmt)) {
this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.UNSUPPORTED_WORKER_SEND_POSITION);
was.hasErrors = true;
}
if (!this.workerExists(syncSendExpr.workerType, workerName, data.env)) {
this.dlog.error(syncSendExpr.pos, DiagnosticErrorCode.UNDEFINED_WORKER, syncSendExpr.workerSymbol);
was.hasErrors = true;
}
syncSendExpr.setBType(
createAccumulatedErrorTypeForMatchingReceive(syncSendExpr.pos, syncSendExpr.expr.getBType(), data));
was.addWorkerAction(syncSendExpr);
analyzeExpr(syncSendExpr.expr, data);
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode, AnalyzerData data) {
validateActionParentNode(workerReceiveNode.pos, workerReceiveNode);
BSymbol sender =
symResolver.lookupSymbolInMainSpace(data.env, names.fromIdNode(workerReceiveNode.workerIdentifier));
if ((sender.tag & SymTag.VARIABLE) != SymTag.VARIABLE) {
sender = symTable.notFoundSymbol;
}
verifyPeerCommunication(workerReceiveNode.pos, sender, workerReceiveNode.workerIdentifier.value, data.env);
WorkerActionSystem was = data.workerActionSystemStack.peek();
String workerName = workerReceiveNode.workerIdentifier.getValue();
if (data.withinQuery || (!isCommunicationAllowedLocation(data.env) && !data.inInternallyDefinedBlockStmt)) {
this.dlog.error(workerReceiveNode.pos, DiagnosticErrorCode.INVALID_WORKER_RECEIVE_POSITION);
was.hasErrors = true;
}
if (!this.workerExists(workerReceiveNode.workerType, workerName, data.env)) {
this.dlog.error(workerReceiveNode.pos, DiagnosticErrorCode.UNDEFINED_WORKER, workerName);
was.hasErrors = true;
}
workerReceiveNode.matchingSendsError = createAccumulatedErrorTypeForMatchingSyncSend(workerReceiveNode, data);
was.addWorkerAction(workerReceiveNode);
}
private void verifyPeerCommunication(Location pos, BSymbol otherWorker, String otherWorkerName, SymbolEnv env) {
if (env.enclEnv.node.getKind() != NodeKind.FUNCTION) {
return;
}
BLangFunction funcNode = (BLangFunction) env.enclEnv.node;
Set<Flag> flagSet = funcNode.flagSet;
Name workerDerivedName = names.fromString("0" + otherWorker.name.value);
if (flagSet.contains(Flag.WORKER)) {
if (otherWorkerName.equals(DEFAULT_WORKER_NAME)) {
if (flagSet.contains(Flag.FORKED)) {
dlog.error(pos, DiagnosticErrorCode.WORKER_INTERACTIONS_ONLY_ALLOWED_BETWEEN_PEERS);
}
return;
}
Scope enclFunctionScope = env.enclEnv.enclEnv.scope;
BInvokableSymbol wLambda = (BInvokableSymbol) enclFunctionScope.lookup(workerDerivedName).symbol;
if (wLambda != null && funcNode.anonForkName != null
&& !funcNode.anonForkName.equals(wLambda.enclForkName)) {
dlog.error(pos, DiagnosticErrorCode.WORKER_INTERACTIONS_ONLY_ALLOWED_BETWEEN_PEERS);
}
} else {
BInvokableSymbol wLambda = (BInvokableSymbol) env.scope.lookup(workerDerivedName).symbol;
if (wLambda != null && wLambda.enclForkName != null) {
dlog.error(pos, DiagnosticErrorCode.WORKER_INTERACTIONS_ONLY_ALLOWED_BETWEEN_PEERS);
}
}
}
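    /**
     * Builds the type recorded on a worker receive for matching against the corresponding sync send: nil,
     * unioned with any error-only return types accumulated so far. A non-error return type encountered before
     * the receive is reported as an error.
     */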
public BType createAccumulatedErrorTypeForMatchingSyncSend(BLangWorkerReceive workerReceiveNode,
AnalyzerData data) {
Set<BType> returnTypesUpToNow = data.returnTypes.peek();
LinkedHashSet<BType> returnTypeAndSendType = new LinkedHashSet<>();
for (BType returnType : returnTypesUpToNow) {
if (onlyContainErrors(returnType)) {
returnTypeAndSendType.add(returnType);
} else {
this.dlog.error(workerReceiveNode.pos, DiagnosticErrorCode.WORKER_RECEIVE_AFTER_RETURN);
}
}
returnTypeAndSendType.add(symTable.nilType);
if (returnTypeAndSendType.size() > 1) {
return BUnionType.create(null, returnTypeAndSendType);
} else {
return symTable.nilType;
}
}
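    /**
     * Returns true if the given type is an error type or a union whose members are all (effectively) error
     * types, considering effective intersection types.
     */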
private boolean onlyContainErrors(BType returnType) {
if (returnType == null) {
return false;
}
returnType = types.getTypeWithEffectiveIntersectionTypes(returnType);
returnType = Types.getReferredType(returnType);
if (returnType.tag == TypeTags.ERROR) {
return true;
}
if (returnType.tag == TypeTags.UNION) {
for (BType memberType : ((BUnionType) returnType).getMemberTypes()) {
BType t = types.getTypeWithEffectiveIntersectionTypes(memberType);
if (t.tag != TypeTags.ERROR) {
return false;
}
}
return true;
}
return false;
}
@Override
public void visit(BLangLiteral literalExpr, AnalyzerData data) {
}
@Override
public void visit(BLangConstRef constRef, AnalyzerData data) {
}
@Override
public void visit(BLangListConstructorExpr listConstructorExpr, AnalyzerData data) {
for (BLangExpression expr : listConstructorExpr.exprs) {
if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
expr = ((BLangListConstructorSpreadOpExpr) expr).expr;
}
analyzeExpr(expr, data);
}
}
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr, AnalyzerData data) {
analyzeExprs(tableConstructorExpr.recordLiteralList, data);
}
@Override
public void visit(BLangRecordLiteral recordLiteral, AnalyzerData data) {
List<RecordLiteralNode.RecordField> fields = recordLiteral.fields;
for (RecordLiteralNode.RecordField field : fields) {
if (field.isKeyValueField()) {
analyzeExpr(((BLangRecordKeyValueField) field).valueExpr, data);
} else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
analyzeExpr((BLangRecordLiteral.BLangRecordVarNameField) field, data);
} else {
analyzeExpr(((BLangRecordLiteral.BLangRecordSpreadOperatorField) field).expr, data);
}
}
Set<Object> names = new HashSet<>();
Set<Object> neverTypedKeys = new HashSet<>();
BType literalBType = recordLiteral.getBType();
BType type = Types.getReferredType(literalBType);
boolean isRecord = type.tag == TypeTags.RECORD;
boolean isOpenRecord = isRecord && !((BRecordType) type).sealed;
boolean isInferredRecordForMapCET = isRecord && recordLiteral.expectedType != null &&
recordLiteral.expectedType.tag == TypeTags.MAP;
BLangRecordLiteral.BLangRecordSpreadOperatorField inclusiveTypeSpreadField = null;
for (RecordLiteralNode.RecordField field : fields) {
BLangExpression keyExpr;
if (field.getKind() == NodeKind.RECORD_LITERAL_SPREAD_OP) {
BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
(BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
BLangExpression spreadOpExpr = spreadOpField.expr;
analyzeExpr(spreadOpExpr, data);
BType spreadOpExprType = Types.getReferredType(spreadOpExpr.getBType());
int spreadFieldTypeTag = spreadOpExprType.tag;
if (spreadFieldTypeTag == TypeTags.MAP) {
if (inclusiveTypeSpreadField != null) {
this.dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.MULTIPLE_INCLUSIVE_TYPES);
continue;
}
inclusiveTypeSpreadField = spreadOpField;
if (fields.size() > 1) {
                        if (!names.isEmpty()) {
this.dlog.error(spreadOpExpr.pos,
DiagnosticErrorCode.SPREAD_FIELD_MAY_DULPICATE_ALREADY_SPECIFIED_KEYS,
spreadOpExpr);
}
continue;
}
}
if (spreadFieldTypeTag != TypeTags.RECORD) {
continue;
}
BRecordType spreadExprRecordType = (BRecordType) spreadOpExprType;
boolean isSpreadExprRecordTypeSealed = spreadExprRecordType.sealed;
if (!isSpreadExprRecordTypeSealed) {
if (inclusiveTypeSpreadField != null) {
this.dlog.error(spreadOpExpr.pos, DiagnosticErrorCode.MULTIPLE_INCLUSIVE_TYPES);
} else {
inclusiveTypeSpreadField = spreadOpField;
}
}
LinkedHashMap<String, BField> fieldsInRecordType = getUnescapedFieldList(spreadExprRecordType.fields);
for (Object fieldName : names) {
if (!fieldsInRecordType.containsKey(fieldName) && !isSpreadExprRecordTypeSealed) {
this.dlog.error(spreadOpExpr.pos,
DiagnosticErrorCode.SPREAD_FIELD_MAY_DULPICATE_ALREADY_SPECIFIED_KEYS,
spreadOpExpr);
break;
}
}
for (String fieldName : fieldsInRecordType.keySet()) {
BField bField = fieldsInRecordType.get(fieldName);
if (names.contains(fieldName)) {
if (bField.type.tag != TypeTags.NEVER) {
this.dlog.error(spreadOpExpr.pos,
DiagnosticErrorCode.DUPLICATE_KEY_IN_RECORD_LITERAL_SPREAD_OP,
type.getKind().typeName(), fieldName, spreadOpField);
}
continue;
}
if (bField.type.tag == TypeTags.NEVER) {
neverTypedKeys.add(fieldName);
continue;
}
if (!neverTypedKeys.remove(fieldName) &&
inclusiveTypeSpreadField != null && isSpreadExprRecordTypeSealed) {
this.dlog.error(spreadOpExpr.pos,
DiagnosticErrorCode.POSSIBLE_DUPLICATE_OF_FIELD_SPECIFIED_VIA_SPREAD_OP,
Types.getReferredType(recordLiteral.expectedType).getKind().typeName(),
bField.symbol, spreadOpField);
}
names.add(fieldName);
}
} else {
if (field.isKeyValueField()) {
BLangRecordLiteral.BLangRecordKey key = ((BLangRecordKeyValueField) field).key;
keyExpr = key.expr;
if (key.computedKey) {
analyzeExpr(keyExpr, data);
continue;
}
} else {
keyExpr = (BLangRecordLiteral.BLangRecordVarNameField) field;
}
if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
String name = ((BLangSimpleVarRef) keyExpr).variableName.value;
String unescapedName = Utils.unescapeJava(name);
if (names.contains(unescapedName)) {
this.dlog.error(keyExpr.pos, DiagnosticErrorCode.DUPLICATE_KEY_IN_RECORD_LITERAL,
Types.getReferredType(recordLiteral.expectedType).getKind().typeName(),
unescapedName);
} else if (inclusiveTypeSpreadField != null && !neverTypedKeys.contains(unescapedName)) {
this.dlog.error(keyExpr.pos,
DiagnosticErrorCode.POSSIBLE_DUPLICATE_OF_FIELD_SPECIFIED_VIA_SPREAD_OP,
unescapedName, inclusiveTypeSpreadField);
}
if (!isInferredRecordForMapCET && isOpenRecord && !((BRecordType) type).fields.containsKey(name)) {
dlog.error(keyExpr.pos, DiagnosticErrorCode.INVALID_RECORD_LITERAL_IDENTIFIER_KEY,
unescapedName);
}
names.add(unescapedName);
} else if (keyExpr.getKind() == NodeKind.LITERAL || keyExpr.getKind() == NodeKind.NUMERIC_LITERAL) {
Object name = ((BLangLiteral) keyExpr).value;
if (names.contains(name)) {
this.dlog.error(keyExpr.pos, DiagnosticErrorCode.DUPLICATE_KEY_IN_RECORD_LITERAL,
Types.getReferredType(recordLiteral.parent.getBType())
.getKind().typeName(), name);
} else if (inclusiveTypeSpreadField != null && !neverTypedKeys.contains(name)) {
this.dlog.error(keyExpr.pos,
DiagnosticErrorCode.POSSIBLE_DUPLICATE_OF_FIELD_SPECIFIED_VIA_SPREAD_OP,
name, inclusiveTypeSpreadField);
}
names.add(name);
}
}
}
if (isInferredRecordForMapCET) {
recordLiteral.expectedType = type;
}
}
@Override
public void visit(BLangRecordLiteral.BLangRecordVarNameField node, AnalyzerData data) {
visit((BLangSimpleVarRef) node, data);
}
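    /**
     * Returns a copy of the given field map keyed by the Java-unescaped field names.
     */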
private LinkedHashMap<String, BField> getUnescapedFieldList(LinkedHashMap<String, BField> fieldMap) {
LinkedHashMap<String, BField> newMap = new LinkedHashMap<>();
for (String key : fieldMap.keySet()) {
newMap.put(Utils.unescapeJava(key), fieldMap.get(key));
}
return newMap;
}
@Override
public void visit(BLangSimpleVarRef varRefExpr, AnalyzerData data) {
switch (varRefExpr.parent.getKind()) {
case WORKER_RECEIVE:
case WORKER_SEND:
case WORKER_SYNC_SEND:
return;
default:
if (varRefExpr.getBType() != null && varRefExpr.getBType().tag == TypeTags.FUTURE) {
trackNamedWorkerReferences(varRefExpr, data);
}
}
BSymbol symbol = varRefExpr.symbol;
if (symbol != null && Symbols.isFlagOn(symbol.flags, Flags.DEPRECATED)) {
logDeprecatedWaring(varRefExpr.variableName.toString(), symbol, varRefExpr.pos);
}
}
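    /**
     * Records a reference to a named worker (a variable whose symbol carries the WORKER flag) in the analyzer
     * data, keyed by the worker symbol.
     */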
private void trackNamedWorkerReferences(BLangSimpleVarRef varRefExpr, AnalyzerData data) {
if (varRefExpr.symbol == null || (varRefExpr.symbol.flags & Flags.WORKER) != Flags.WORKER) {
return;
}
data.workerReferences.computeIfAbsent(varRefExpr.symbol, s -> new LinkedHashSet<>());
data.workerReferences.get(varRefExpr.symbol).add(varRefExpr);
}
@Override
public void visit(BLangRecordVarRef varRefExpr, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangErrorVarRef varRefExpr, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangTupleVarRef varRefExpr, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr, AnalyzerData data) {
analyzeFieldBasedAccessExpr(fieldAccessExpr, data);
}
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess,
AnalyzerData data) {
analyzeFieldBasedAccessExpr(nsPrefixedFieldBasedAccess, data);
}
private void analyzeFieldBasedAccessExpr(BLangFieldBasedAccess fieldAccessExpr, AnalyzerData data) {
BLangExpression expr = fieldAccessExpr.expr;
analyzeExpr(expr, data);
BSymbol symbol = fieldAccessExpr.symbol;
if (symbol != null && Symbols.isFlagOn(fieldAccessExpr.symbol.flags, Flags.DEPRECATED)) {
String deprecatedConstruct = generateDeprecatedConstructString(expr, fieldAccessExpr.field.toString(),
symbol);
dlog.warning(fieldAccessExpr.pos, DiagnosticWarningCode.USAGE_OF_DEPRECATED_CONSTRUCT, deprecatedConstruct);
}
}
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr, AnalyzerData data) {
analyzeExpr(indexAccessExpr.indexExpr, data);
analyzeExpr(indexAccessExpr.expr, data);
}
@Override
public void visit(BLangInvocation invocationExpr, AnalyzerData data) {
analyzeExpr(invocationExpr.expr, data);
analyzeExprs(invocationExpr.requiredArgs, data);
analyzeExprs(invocationExpr.restArgs, data);
validateInvocationInMatchGuard(invocationExpr);
if ((invocationExpr.symbol != null) && invocationExpr.symbol.kind == SymbolKind.FUNCTION) {
BSymbol funcSymbol = invocationExpr.symbol;
if (Symbols.isFlagOn(funcSymbol.flags, Flags.TRANSACTIONAL) && !data.withinTransactionScope) {
dlog.error(invocationExpr.pos, DiagnosticErrorCode.TRANSACTIONAL_FUNC_INVOKE_PROHIBITED);
return;
}
if (Symbols.isFlagOn(funcSymbol.flags, Flags.DEPRECATED)) {
logDeprecatedWarningForInvocation(invocationExpr);
}
}
}
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr, AnalyzerData data) {
analyzeExprs(errorConstructorExpr.positionalArgs, data);
if (!errorConstructorExpr.namedArgs.isEmpty()) {
analyzeExprs(errorConstructorExpr.namedArgs, data);
}
}
@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocation, AnalyzerData data) {
validateInvocationInMatchGuard(actionInvocation);
if (!actionInvocation.async && !data.withinTransactionScope &&
Symbols.isFlagOn(actionInvocation.symbol.flags, Flags.TRANSACTIONAL)) {
dlog.error(actionInvocation.pos, DiagnosticErrorCode.TRANSACTIONAL_FUNC_INVOKE_PROHIBITED,
actionInvocation.symbol);
return;
}
if (actionInvocation.async && data.withinTransactionScope &&
!Symbols.isFlagOn(actionInvocation.symbol.flags, Flags.TRANSACTIONAL)) {
dlog.error(actionInvocation.pos, DiagnosticErrorCode.USAGE_OF_START_WITHIN_TRANSACTION_IS_PROHIBITED);
return;
}
analyzeExpr(actionInvocation.expr, data);
analyzeExprs(actionInvocation.requiredArgs, data);
analyzeExprs(actionInvocation.restArgs, data);
if (actionInvocation.symbol != null && actionInvocation.symbol.kind == SymbolKind.FUNCTION &&
Symbols.isFlagOn(actionInvocation.symbol.flags, Flags.DEPRECATED)) {
logDeprecatedWarningForInvocation(actionInvocation);
}
if (actionInvocation.flagSet.contains(Flag.TRANSACTIONAL) && !data.withinTransactionScope) {
dlog.error(actionInvocation.pos, DiagnosticErrorCode.TRANSACTIONAL_FUNC_INVOKE_PROHIBITED);
return;
}
if (actionInvocation.async && data.withinLockBlock) {
dlog.error(actionInvocation.pos, actionInvocation.functionPointerInvocation ?
DiagnosticErrorCode.USAGE_OF_WORKER_WITHIN_LOCK_IS_PROHIBITED :
DiagnosticErrorCode.USAGE_OF_START_WITHIN_LOCK_IS_PROHIBITED);
return;
}
if (actionInvocation.symbol != null &&
(actionInvocation.symbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) {
dlog.error(actionInvocation.pos, DiagnosticErrorCode.INVALID_FUNCTIONAL_CONSTRUCTOR_INVOCATION,
actionInvocation.symbol);
return;
}
validateActionInvocation(actionInvocation.pos, actionInvocation);
if (!actionInvocation.async && data.withinTransactionScope) {
actionInvocation.invokedInsideTransaction = true;
}
}
@Override
public void visit(BLangInvocation.BLangResourceAccessInvocation resourceActionInvocation, AnalyzerData data) {
validateInvocationInMatchGuard(resourceActionInvocation);
analyzeExpr(resourceActionInvocation.expr, data);
analyzeExprs(resourceActionInvocation.requiredArgs, data);
analyzeExprs(resourceActionInvocation.restArgs, data);
analyzeExpr(resourceActionInvocation.resourceAccessPathSegments, data);
resourceActionInvocation.invokedInsideTransaction = data.withinTransactionScope;
if (Symbols.isFlagOn(resourceActionInvocation.symbol.flags, Flags.TRANSACTIONAL) &&
!data.withinTransactionScope) {
dlog.error(resourceActionInvocation.pos, DiagnosticErrorCode.TRANSACTIONAL_FUNC_INVOKE_PROHIBITED);
return;
}
if (Symbols.isFlagOn(resourceActionInvocation.symbol.flags, Flags.DEPRECATED)) {
logDeprecatedWarningForInvocation(resourceActionInvocation);
}
validateActionInvocation(resourceActionInvocation.pos, resourceActionInvocation);
}
private void logDeprecatedWarningForInvocation(BLangInvocation invocationExpr) {
String deprecatedConstruct = invocationExpr.name.toString();
BLangExpression expr = invocationExpr.expr;
BSymbol funcSymbol = invocationExpr.symbol;
if (expr != null) {
deprecatedConstruct = generateDeprecatedConstructString(expr, deprecatedConstruct, funcSymbol);
} else if (!Names.DOT.equals(funcSymbol.pkgID.name)) {
deprecatedConstruct = funcSymbol.pkgID + ":" + deprecatedConstruct;
}
dlog.warning(invocationExpr.pos, DiagnosticWarningCode.USAGE_OF_DEPRECATED_CONSTRUCT, deprecatedConstruct);
}
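    /**
     * Builds the qualified construct name used in deprecation warnings for field accesses and method
     * invocations, prefixing the receiver type or the symbol's package ID where applicable.
     */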
private String generateDeprecatedConstructString(BLangExpression expr, String fieldOrMethodName,
BSymbol symbol) {
BType bType = expr.getBType();
if (bType.tag == TypeTags.TYPEREFDESC) {
return bType + "." + fieldOrMethodName;
}
if (bType.tag == TypeTags.OBJECT) {
BObjectType objectType = (BObjectType) bType;
            if (objectType.classDef == null || !objectType.classDef.internal) {
fieldOrMethodName = bType + "." + fieldOrMethodName;
}
return fieldOrMethodName;
}
if (symbol.kind == SymbolKind.FUNCTION && !Names.DOT.equals(symbol.pkgID.name)) {
fieldOrMethodName = symbol.pkgID + ":" + fieldOrMethodName;
}
return fieldOrMethodName;
}
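    /**
     * Validates that the client expression of an action invocation is a simple variable reference, a group
     * expression, or a field access on {@code self}, and that the invocation appears in a valid parent node.
     */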
private void validateActionInvocation(Location pos, BLangInvocation iExpr) {
if (iExpr.expr != null) {
final NodeKind clientNodeKind = iExpr.expr.getKind();
if (clientNodeKind == NodeKind.FIELD_BASED_ACCESS_EXPR) {
final BLangFieldBasedAccess fieldBasedAccess = (BLangFieldBasedAccess) iExpr.expr;
if (fieldBasedAccess.expr.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
dlog.error(pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_AS_EXPR);
} else {
final BLangSimpleVarRef selfName = (BLangSimpleVarRef) fieldBasedAccess.expr;
if (!Names.SELF.equals(selfName.symbol.name)) {
dlog.error(pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_AS_EXPR);
}
}
} else if (clientNodeKind != NodeKind.SIMPLE_VARIABLE_REF &&
clientNodeKind != NodeKind.GROUP_EXPR) {
dlog.error(pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_AS_EXPR);
}
}
validateActionParentNode(pos, iExpr);
}
/**
* Actions can only occur as part of a statement or nested inside other actions.
*/
private boolean validateActionParentNode(Location pos, BLangNode node) {
BLangNode parent = node.parent;
while (parent != null) {
final NodeKind kind = parent.getKind();
if (parent instanceof StatementNode || checkActionInQuery(kind)) {
return true;
} else if (parent instanceof ActionNode || parent instanceof BLangVariable || kind == NodeKind.CHECK_EXPR ||
kind == NodeKind.CHECK_PANIC_EXPR || kind == NodeKind.TRAP_EXPR || kind == NodeKind.GROUP_EXPR ||
kind == NodeKind.TYPE_CONVERSION_EXPR) {
if (parent instanceof BLangInvocation.BLangActionInvocation) {
break;
}
parent = parent.parent;
continue;
}
break;
}
dlog.error(pos, DiagnosticErrorCode.INVALID_ACTION_INVOCATION_AS_EXPR);
return false;
}
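    /**
     * Returns true if the given parent node kind is a query clause (from, select, or let) in which an action is
     * allowed.
     */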
private boolean checkActionInQuery(NodeKind parentKind) {
return parentKind == NodeKind.FROM || parentKind == NodeKind.SELECT ||
parentKind == NodeKind.LET_CLAUSE;
}
@Override
public void visit(BLangTypeInit cIExpr, AnalyzerData data) {
analyzeExprs(cIExpr.argsExpr, data);
analyzeExpr(cIExpr.initInvocation, data);
BType type = cIExpr.getBType();
if (cIExpr.userDefinedType != null && Symbols.isFlagOn(type.tsymbol.flags, Flags.DEPRECATED)) {
logDeprecatedWaring(((BLangUserDefinedType) cIExpr.userDefinedType).typeName.toString(), type.tsymbol,
cIExpr.pos);
}
}
@Override
public void visit(BLangTernaryExpr ternaryExpr, AnalyzerData data) {
analyzeExpr(ternaryExpr.expr, data);
analyzeExpr(ternaryExpr.thenExpr, data);
analyzeExpr(ternaryExpr.elseExpr, data);
}
@Override
public void visit(BLangWaitExpr awaitExpr, AnalyzerData data) {
BLangExpression expr = awaitExpr.getExpression();
boolean validWaitFuture = validateWaitFutureExpr(expr);
analyzeExpr(expr, data);
boolean validActionParent = validateActionParentNode(awaitExpr.pos, awaitExpr);
WorkerActionSystem was = data.workerActionSystemStack.peek();
was.addWorkerAction(awaitExpr, data.env);
was.hasErrors = !(validWaitFuture || validActionParent);
}
@Override
public void visit(BLangWaitForAllExpr waitForAllExpr, AnalyzerData data) {
boolean validWaitFuture = true;
for (BLangWaitForAllExpr.BLangWaitKeyValue keyValue : waitForAllExpr.keyValuePairs) {
BLangExpression expr = keyValue.valueExpr != null ? keyValue.valueExpr : keyValue.keyExpr;
validWaitFuture = validWaitFuture && validateWaitFutureExpr(expr);
analyzeExpr(expr, data);
}
boolean validActionParent = validateActionParentNode(waitForAllExpr.pos, waitForAllExpr);
WorkerActionSystem was = data.workerActionSystemStack.peek();
was.addWorkerAction(waitForAllExpr, data.env);
was.hasErrors = !(validWaitFuture || validActionParent);
}
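    /**
     * Validates the expression waited on: mapping constructors and actions are not allowed as wait future
     * expressions.
     */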
private boolean validateWaitFutureExpr(BLangExpression expr) {
if (expr.getKind() == NodeKind.RECORD_LITERAL_EXPR) {
dlog.error(expr.pos, DiagnosticErrorCode.INVALID_WAIT_MAPPING_CONSTRUCTORS);
return false;
}
if (expr instanceof ActionNode) {
dlog.error(expr.pos, DiagnosticErrorCode.INVALID_WAIT_ACTIONS);
return false;
}
return true;
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess, AnalyzerData data) {
analyzeExpr(xmlElementAccess.expr, data);
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation, AnalyzerData data) {
analyzeExpr(xmlNavigation.expr, data);
if (xmlNavigation.childIndex != null) {
if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS
|| xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
dlog.error(xmlNavigation.pos, DiagnosticErrorCode.UNSUPPORTED_MEMBER_ACCESS_IN_XML_NAVIGATION);
}
analyzeExpr(xmlNavigation.childIndex, data);
}
validateMethodInvocationsInXMLNavigationExpression(xmlNavigation);
}
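    /**
     * Reports an error when a method is invoked directly on an XML navigation expression; passing the
     * navigation expression as an argument to a non-langlib invocation is allowed. The check runs only once per
     * expression, tracked via {@code methodInvocationAnalyzed}.
     */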
private void validateMethodInvocationsInXMLNavigationExpression(BLangXMLNavigationAccess expression) {
if (!expression.methodInvocationAnalyzed && expression.parent.getKind() == NodeKind.INVOCATION) {
BLangInvocation invocation = (BLangInvocation) expression.parent;
if (invocation.argExprs.contains(expression)
&& ((invocation.symbol.flags & Flags.LANG_LIB) != Flags.LANG_LIB)) {
return;
}
dlog.error(invocation.pos, DiagnosticErrorCode.UNSUPPORTED_METHOD_INVOCATION_XML_NAV);
}
expression.methodInvocationAnalyzed = true;
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr, AnalyzerData data) {
BLangIdentifier flushWrkIdentifier = workerFlushExpr.workerIdentifier;
Stack<WorkerActionSystem> workerActionSystems = data.workerActionSystemStack;
WorkerActionSystem currentWrkerAction = workerActionSystems.peek();
List<BLangWorkerSend> sendStmts = getAsyncSendStmtsOfWorker(currentWrkerAction);
if (flushWrkIdentifier != null) {
List<BLangWorkerSend> sendsToGivenWrkr = sendStmts.stream()
.filter(bLangNode -> bLangNode.workerIdentifier.equals
(flushWrkIdentifier))
.collect(Collectors.toList());
            if (sendsToGivenWrkr.isEmpty()) {
this.dlog.error(workerFlushExpr.pos, DiagnosticErrorCode.INVALID_WORKER_FLUSH_FOR_WORKER,
workerFlushExpr.workerSymbol, currentWrkerAction.currentWorkerId());
return;
} else {
sendStmts = sendsToGivenWrkr;
}
} else {
            if (sendStmts.isEmpty()) {
this.dlog.error(workerFlushExpr.pos, DiagnosticErrorCode.INVALID_WORKER_FLUSH,
currentWrkerAction.currentWorkerId());
return;
}
}
workerFlushExpr.cachedWorkerSendStmts = sendStmts;
validateActionParentNode(workerFlushExpr.pos, workerFlushExpr);
}
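    /**
     * Returns the async worker send statements recorded for the currently analyzed worker's action state
     * machine.
     */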
private List<BLangWorkerSend> getAsyncSendStmtsOfWorker(WorkerActionSystem currentWorkerAction) {
List<BLangNode> actions = currentWorkerAction.workerActionStateMachines.peek().actions;
return actions.stream()
.filter(CodeAnalyzer::isWorkerSend)
.map(bLangNode -> (BLangWorkerSend) bLangNode)
.collect(Collectors.toList());
}
@Override
public void visit(BLangTrapExpr trapExpr, AnalyzerData data) {
analyzeExpr(trapExpr.expr, data);
}
@Override
public void visit(BLangBinaryExpr binaryExpr, AnalyzerData data) {
if (validateBinaryExpr(binaryExpr)) {
analyzeExpr(binaryExpr.lhsExpr, data);
analyzeExpr(binaryExpr.rhsExpr, data);
}
}
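    /**
     * Validates binary expressions with future-typed operands: the {@code |} operator over futures is only
     * allowed within a wait expression, checked recursively through nested binary expressions.
     */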
private boolean validateBinaryExpr(BLangBinaryExpr binaryExpr) {
if (binaryExpr.lhsExpr.getBType().tag != TypeTags.FUTURE
&& binaryExpr.rhsExpr.getBType().tag != TypeTags.FUTURE) {
return true;
}
BLangNode parentNode = binaryExpr.parent;
if (binaryExpr.lhsExpr.getBType().tag == TypeTags.FUTURE
|| binaryExpr.rhsExpr.getBType().tag == TypeTags.FUTURE) {
if (parentNode == null) {
return false;
}
if (parentNode.getKind() == NodeKind.WAIT_EXPR) {
return true;
}
}
if (parentNode.getKind() != NodeKind.BINARY_EXPR && binaryExpr.opKind == OperatorKind.BITWISE_OR) {
dlog.error(binaryExpr.pos, DiagnosticErrorCode.OPERATOR_NOT_SUPPORTED, OperatorKind.BITWISE_OR,
symTable.futureType);
return false;
}
if (parentNode.getKind() == NodeKind.BINARY_EXPR) {
return validateBinaryExpr((BLangBinaryExpr) parentNode);
}
return true;
}
@Override
public void visit(BLangElvisExpr elvisExpr, AnalyzerData data) {
analyzeExpr(elvisExpr.lhsExpr, data);
analyzeExpr(elvisExpr.rhsExpr, data);
}
@Override
public void visit(BLangGroupExpr groupExpr, AnalyzerData data) {
analyzeExpr(groupExpr.expression, data);
}
@Override
public void visit(BLangUnaryExpr unaryExpr, AnalyzerData data) {
analyzeExpr(unaryExpr.expr, data);
}
@Override
public void visit(BLangTypedescExpr accessExpr, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangTypeConversionExpr conversionExpr, AnalyzerData data) {
analyzeExpr(conversionExpr.expr, data);
conversionExpr.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, data));
}
@Override
public void visit(BLangXMLQName xmlQName, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangXMLAttribute xmlAttribute, AnalyzerData data) {
analyzeExpr(xmlAttribute.name, data);
analyzeExpr(xmlAttribute.value, data);
}
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral, AnalyzerData data) {
analyzeExpr(xmlElementLiteral.startTagName, data);
analyzeExpr(xmlElementLiteral.endTagName, data);
analyzeExprs(xmlElementLiteral.attributes, data);
analyzeExprs(xmlElementLiteral.children, data);
}
@Override
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral, AnalyzerData data) {
analyzeExprs(xmlSequenceLiteral.xmlItems, data);
}
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral, AnalyzerData data) {
analyzeExprs(xmlTextLiteral.textFragments, data);
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral, AnalyzerData data) {
analyzeExprs(xmlCommentLiteral.textFragments, data);
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral, AnalyzerData data) {
analyzeExprs(xmlProcInsLiteral.dataFragments, data);
analyzeExpr(xmlProcInsLiteral.target, data);
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString, AnalyzerData data) {
analyzeExprs(xmlQuotedString.textFragments, data);
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral, AnalyzerData data) {
analyzeExprs(stringTemplateLiteral.exprs, data);
}
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral, AnalyzerData data) {
analyzeExprs(rawTemplateLiteral.strings, data);
analyzeExprs(rawTemplateLiteral.insertions, data);
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction, AnalyzerData data) {
boolean isWorker = false;
analyzeNode(bLangLambdaFunction.function, data);
if (bLangLambdaFunction.function.flagSet.contains(Flag.TRANSACTIONAL) &&
bLangLambdaFunction.function.flagSet.contains(Flag.WORKER) && !data.withinTransactionScope) {
dlog.error(bLangLambdaFunction.pos, DiagnosticErrorCode.TRANSACTIONAL_WORKER_OUT_OF_TRANSACTIONAL_SCOPE,
bLangLambdaFunction);
return;
}
if (bLangLambdaFunction.parent.getKind() == NodeKind.VARIABLE) {
String workerVarName = ((BLangSimpleVariable) bLangLambdaFunction.parent).name.value;
if (workerVarName.startsWith(WORKER_LAMBDA_VAR_PREFIX)) {
String workerName = workerVarName.substring(1);
isWorker = true;
data.workerActionSystemStack.peek().startWorkerActionStateMachine(workerName,
bLangLambdaFunction.function.pos,
bLangLambdaFunction.function);
}
}
if (isWorker) {
this.visitFunction(bLangLambdaFunction.function, data);
} else {
try {
this.initNewWorkerActionSystem(data);
data.workerActionSystemStack.peek().startWorkerActionStateMachine(DEFAULT_WORKER_NAME,
bLangLambdaFunction.pos,
bLangLambdaFunction.function);
this.visitFunction(bLangLambdaFunction.function, data);
data.workerActionSystemStack.peek().endWorkerActionStateMachine();
} finally {
this.finalizeCurrentWorkerActionSystem(data);
}
}
if (isWorker) {
data.workerActionSystemStack.peek().endWorkerActionStateMachine();
}
}
@Override
public void visit(BLangArrowFunction bLangArrowFunction, AnalyzerData data) {
DefaultValueState prevDefaultValueState = data.defaultValueState;
if (prevDefaultValueState == DefaultValueState.RECORD_FIELD_DEFAULT ||
prevDefaultValueState == DefaultValueState.OBJECT_FIELD_INITIALIZER) {
data.defaultValueState = DefaultValueState.FUNCTION_IN_DEFAULT_VALUE;
}
analyzeExpr(bLangArrowFunction.body.expr, data);
data.defaultValueState = prevDefaultValueState;
}
/* Type Nodes */
@Override
public void visit(BLangRecordTypeNode recordTypeNode, AnalyzerData data) {
data.env = SymbolEnv.createTypeEnv(recordTypeNode, recordTypeNode.symbol.scope, data.env);
for (BLangSimpleVariable field : recordTypeNode.fields) {
DefaultValueState prevDefaultValueState = data.defaultValueState;
data.defaultValueState = DefaultValueState.RECORD_FIELD_DEFAULT;
analyzeNode(field, data);
data.defaultValueState = prevDefaultValueState;
}
}
@Override
public void visit(BLangObjectTypeNode objectTypeNode, AnalyzerData data) {
data.env = SymbolEnv.createTypeEnv(objectTypeNode, objectTypeNode.symbol.scope, data.env);
for (BLangSimpleVariable field : objectTypeNode.fields) {
analyzeNode(field, data);
}
List<BLangFunction> bLangFunctionList = new ArrayList<>(objectTypeNode.functions);
if (objectTypeNode.initFunction != null) {
bLangFunctionList.add(objectTypeNode.initFunction);
}
bLangFunctionList.sort(Comparator.comparingInt(function -> function.pos.lineRange().startLine().line()));
for (BLangFunction function : bLangFunctionList) {
analyzeNode(function, data);
}
}
@Override
public void visit(BLangValueType valueType, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangArrayType arrayType, AnalyzerData data) {
if (containsInferredArraySizesOfHigherDimensions(arrayType.sizes)) {
dlog.error(arrayType.pos, DiagnosticErrorCode.INFER_SIZE_ONLY_SUPPORTED_IN_FIRST_DIMENSION);
} else if (isSizeInferredArray(arrayType.sizes) && !isValidInferredArray(arrayType.parent)) {
dlog.error(arrayType.pos, DiagnosticErrorCode.CANNOT_INFER_SIZE_ARRAY_SIZE_FROM_THE_CONTEXT);
}
analyzeTypeNode(arrayType.elemtype, data);
}
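    /**
     * Returns true if the last dimension of the array type uses the inferred-size indicator.
     */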
private boolean isSizeInferredArray(List<BLangExpression> indexSizes) {
return !indexSizes.isEmpty() && isInferredArrayIndicator(indexSizes.get(indexSizes.size() - 1));
}
private boolean isInferredArrayIndicator(BLangExpression size) {
return size.getKind() == LITERAL && ((BLangLiteral) size).value.equals(Constants.INFERRED_ARRAY_INDICATOR);
}
private boolean containsInferredArraySizesOfHigherDimensions(List<BLangExpression> sizes) {
if (sizes.size() < 2) {
return false;
}
for (int i = 0; i < sizes.size() - 1; i++) {
if (isInferredArrayIndicator(sizes.get(i))) {
return true;
}
}
return false;
}
@Override
public void visit(BLangBuiltInRefTypeNode builtInRefType, AnalyzerData data) {
/* ignore */
}
@Override
public void visit(BLangConstrainedType constrainedType, AnalyzerData data) {
analyzeTypeNode(constrainedType.constraint, data);
}
@Override
public void visit(BLangStreamType streamType, AnalyzerData data) {
analyzeTypeNode(streamType.constraint, data);
analyzeTypeNode(streamType.error, data);
}
@Override
public void visit(BLangTableTypeNode tableType, AnalyzerData data) {
analyzeTypeNode(tableType.constraint, data);
if (tableType.tableKeyTypeConstraint != null) {
analyzeTypeNode(tableType.tableKeyTypeConstraint.keyType, data);
}
}
@Override
public void visit(BLangErrorType errorType, AnalyzerData data) {
BLangType detailType = errorType.detailType;
if (detailType != null && detailType.getKind() == NodeKind.CONSTRAINED_TYPE) {
BLangType constraint = ((BLangConstrainedType) detailType).constraint;
if (constraint.getKind() == NodeKind.USER_DEFINED_TYPE) {
BLangUserDefinedType userDefinedType = (BLangUserDefinedType) constraint;
if (userDefinedType.typeName.value.equals(TypeDefBuilderHelper.INTERSECTED_ERROR_DETAIL)) {
return;
}
}
}
analyzeTypeNode(errorType.detailType, data);
}
@Override
public void visit(BLangUserDefinedType userDefinedType, AnalyzerData data) {
BTypeSymbol typeSymbol = userDefinedType.getBType().tsymbol;
if (typeSymbol != null && Symbols.isFlagOn(typeSymbol.flags, Flags.DEPRECATED)) {
logDeprecatedWaring(userDefinedType.typeName.toString(), typeSymbol, userDefinedType.pos);
}
}
@Override
public void visit(BLangTupleTypeNode tupleTypeNode, AnalyzerData data) {
tupleTypeNode.memberTypeNodes.forEach(memberType -> analyzeTypeNode(memberType, data));
analyzeTypeNode(tupleTypeNode.restParamType, data);
}
@Override
public void visit(BLangUnionTypeNode unionTypeNode, AnalyzerData data) {
unionTypeNode.memberTypeNodes.forEach(memberType -> analyzeTypeNode(memberType, data));
}
@Override
public void visit(BLangIntersectionTypeNode intersectionTypeNode, AnalyzerData data) {
for (BLangType constituentTypeNode : intersectionTypeNode.constituentTypeNodes) {
analyzeTypeNode(constituentTypeNode, data);
}
}
@Override
public void visit(BLangFunctionTypeNode functionTypeNode, AnalyzerData data) {
if (functionTypeNode.flagSet.contains(Flag.ANY_FUNCTION)) {
return;
}
functionTypeNode.params.forEach(node -> analyzeNode(node, data));
analyzeTypeNode(functionTypeNode.returnTypeNode, data);
}
@Override
public void visit(BLangFiniteTypeNode finiteTypeNode, AnalyzerData data) {
/* Ignore */
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression, AnalyzerData data) {
analyzeExpr(bLangVarArgsExpression.expr, data);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression, AnalyzerData data) {
analyzeExpr(bLangNamedArgsExpression.expr, data);
}
@Override
public void visit(BLangCheckedExpr checkedExpr, AnalyzerData data) {
data.failVisited = true;
analyzeExpr(checkedExpr.expr, data);
if (data.env.scope.owner.getKind() == SymbolKind.PACKAGE) {
return;
}
BLangInvokableNode enclInvokable = data.env.enclInvokable;
List<BType> equivalentErrorTypeList = checkedExpr.equivalentErrorTypeList;
if (equivalentErrorTypeList != null && !equivalentErrorTypeList.isEmpty()) {
if (data.defaultValueState == DefaultValueState.RECORD_FIELD_DEFAULT) {
dlog.error(checkedExpr.pos,
DiagnosticErrorCode.INVALID_USAGE_OF_CHECK_IN_RECORD_FIELD_DEFAULT_EXPRESSION);
return;
}
if (data.defaultValueState == DefaultValueState.OBJECT_FIELD_INITIALIZER) {
BAttachedFunction initializerFunc =
((BObjectTypeSymbol) getEnclosingClass(data.env).getBType().tsymbol).initializerFunc;
if (initializerFunc == null) {
dlog.error(checkedExpr.pos,
DiagnosticErrorCode
.INVALID_USAGE_OF_CHECK_IN_OBJECT_FIELD_INITIALIZER_IN_OBJECT_WITH_NO_INIT_METHOD);
return;
}
BType exprErrorTypes = getErrorTypes(checkedExpr.expr.getBType());
BType initMethodReturnType = initializerFunc.type.retType;
if (!types.isAssignable(exprErrorTypes, initMethodReturnType)) {
dlog.error(checkedExpr.pos, DiagnosticErrorCode
.INVALID_USAGE_OF_CHECK_IN_OBJECT_FIELD_INITIALIZER_WITH_INIT_METHOD_RETURN_TYPE_MISMATCH,
initMethodReturnType, exprErrorTypes);
}
return;
}
}
if (enclInvokable == null) {
return;
}
BType exprType = enclInvokable.getReturnTypeNode().getBType();
BType checkedExprType = checkedExpr.expr.getBType();
BType errorType = getErrorTypes(checkedExprType);
if (errorType == symTable.semanticError) {
return;
}
if (!data.failureHandled && !types.isAssignable(errorType, exprType) &&
!types.isNeverTypeOrStructureTypeWithARequiredNeverMember(checkedExprType)) {
dlog.error(checkedExpr.pos,
DiagnosticErrorCode.CHECKED_EXPR_NO_MATCHING_ERROR_RETURN_IN_ENCL_INVOKABLE);
}
if (!data.errorTypes.empty()) {
data.errorTypes.peek().add(getErrorTypes(checkedExpr.expr.getBType()));
}
BType errorTypes;
if (exprType.tag == TypeTags.UNION) {
errorTypes = types.getErrorType((BUnionType) exprType);
} else {
errorTypes = exprType;
}
data.returnTypes.peek().add(errorTypes);
}
@Override
public void visit(BLangCheckPanickedExpr checkPanicExpr, AnalyzerData data) {
analyzeExpr(checkPanicExpr.expr, data);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr, AnalyzerData data) {
}
@Override
public void visit(BLangQueryExpr queryExpr, AnalyzerData data) {
boolean prevQueryToTableWithKey = data.queryToTableWithKey;
data.queryToTableWithKey = queryExpr.isTable() && !queryExpr.fieldNameIdentifierList.isEmpty();
data.queryToMap = queryExpr.isMap;
boolean prevWithinQuery = data.withinQuery;
data.withinQuery = true;
int fromCount = 0;
for (BLangNode clause : queryExpr.getQueryClauses()) {
if (clause.getKind() == NodeKind.FROM) {
fromCount++;
BLangFromClause fromClause = (BLangFromClause) clause;
BLangExpression collection = (BLangExpression) fromClause.getCollection();
if (fromCount > 1) {
if (TypeTags.STREAM == Types.getReferredType(collection.getBType()).tag) {
this.dlog.error(collection.pos, DiagnosticErrorCode.NOT_ALLOWED_STREAM_USAGE_WITH_FROM);
}
}
}
analyzeNode(clause, data);
}
data.withinQuery = prevWithinQuery;
data.queryToTableWithKey = prevQueryToTableWithKey;
}
@Override
public void visit(BLangQueryAction queryAction, AnalyzerData data) {
boolean prevFailureHandled = data.failureHandled;
data.failureHandled = true;
boolean prevWithinQuery = data.withinQuery;
data.withinQuery = true;
int fromCount = 0;
for (BLangNode clause : queryAction.getQueryClauses()) {
if (clause.getKind() == NodeKind.FROM) {
fromCount++;
BLangFromClause fromClause = (BLangFromClause) clause;
BLangExpression collection = (BLangExpression) fromClause.getCollection();
if (fromCount > 1) {
if (TypeTags.STREAM == Types.getReferredType(collection.getBType()).tag) {
this.dlog.error(collection.pos, DiagnosticErrorCode.NOT_ALLOWED_STREAM_USAGE_WITH_FROM);
}
}
}
analyzeNode(clause, data);
}
validateActionParentNode(queryAction.pos, queryAction);
data.failureHandled = prevFailureHandled;
data.withinQuery = prevWithinQuery;
}
@Override
public void visit(BLangFromClause fromClause, AnalyzerData data) {
analyzeExpr(fromClause.collection, data);
}
@Override
public void visit(BLangJoinClause joinClause, AnalyzerData data) {
analyzeExpr(joinClause.collection, data);
if (joinClause.onClause != null) {
analyzeNode(joinClause.onClause, data);
}
}
@Override
public void visit(BLangLetClause letClause, AnalyzerData data) {
for (BLangLetVariable letVariable : letClause.letVarDeclarations) {
analyzeNode((BLangNode) letVariable.definitionNode.getVariable(), data);
}
}
@Override
public void visit(BLangWhereClause whereClause, AnalyzerData data) {
analyzeExpr(whereClause.expression, data);
}
@Override
public void visit(BLangOnClause onClause, AnalyzerData data) {
analyzeExpr(onClause.lhsExpr, data);
analyzeExpr(onClause.rhsExpr, data);
}
@Override
public void visit(BLangOrderByClause orderByClause, AnalyzerData data) {
orderByClause.orderByKeyList.forEach(value -> analyzeExpr((BLangExpression) value.getOrderKey(), data));
}
@Override
public void visit(BLangSelectClause selectClause, AnalyzerData data) {
analyzeExpr(selectClause.expression, data);
}
@Override
public void visit(BLangOnConflictClause onConflictClause, AnalyzerData data) {
analyzeExpr(onConflictClause.expression, data);
if (!(data.queryToTableWithKey || data.queryToMap)) {
dlog.error(onConflictClause.pos,
DiagnosticErrorCode.ON_CONFLICT_ONLY_WORKS_WITH_MAPS_OR_TABLES_WITH_KEY_SPECIFIER);
}
}
@Override
public void visit(BLangDoClause doClause, AnalyzerData data) {
analyzeNode(doClause.body, data);
}
@Override
public void visit(BLangOnFailClause onFailClause, AnalyzerData data) {
boolean currentFailVisited = data.failVisited;
data.failVisited = false;
VariableDefinitionNode onFailVarDefNode = onFailClause.variableDefinitionNode;
if (onFailVarDefNode != null) {
BLangVariable onFailVarNode = (BLangVariable) onFailVarDefNode.getVariable();
for (BType errorType : data.errorTypes.peek()) {
if (!types.isAssignable(errorType, onFailVarNode.getBType())) {
dlog.error(onFailVarNode.pos, DiagnosticErrorCode.INCOMPATIBLE_ON_FAIL_ERROR_DEFINITION, errorType,
onFailVarNode.getBType());
}
}
}
analyzeNode(onFailClause.body, data);
onFailClause.bodyContainsFail = data.failVisited;
data.failVisited = currentFailVisited;
}
@Override
public void visit(BLangLimitClause limitClause, AnalyzerData data) {
analyzeExpr(limitClause.expression, data);
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr, AnalyzerData data) {
BLangExpression expr = typeTestExpr.expr;
analyzeNode(expr, data);
BType exprType = expr.getBType();
BType typeNodeType = typeTestExpr.typeNode.getBType();
if (typeNodeType == symTable.semanticError || exprType == symTable.semanticError) {
return;
}
if (types.isAssignable(exprType, typeNodeType)) {
if (typeTestExpr.isNegation) {
dlog.hint(typeTestExpr.pos, DiagnosticHintCode.EXPRESSION_ALWAYS_FALSE);
return;
}
if (types.isNeverTypeOrStructureTypeWithARequiredNeverMember(exprType)) {
dlog.hint(typeTestExpr.pos, DiagnosticHintCode.UNNECESSARY_CONDITION_FOR_VARIABLE_OF_TYPE_NEVER);
return;
}
dlog.hint(typeTestExpr.pos, DiagnosticHintCode.UNNECESSARY_CONDITION);
return;
}
if (!intersectionExists(expr, typeNodeType, data, typeTestExpr.pos)) {
dlog.error(typeTestExpr.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_CHECK, exprType, typeNodeType);
}
}
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr, AnalyzerData data) {
analyzeExpr(annotAccessExpr.expr, data);
BAnnotationSymbol annotationSymbol = annotAccessExpr.annotationSymbol;
if (annotationSymbol != null && Symbols.isFlagOn(annotationSymbol.flags, Flags.DEPRECATED)) {
logDeprecatedWaring(annotAccessExpr.annotationName.toString(), annotationSymbol, annotAccessExpr.pos);
}
}
@Override
public void visit(BLangRegExpTemplateLiteral regExpTemplateLiteral, AnalyzerData data) {
List<BLangExpression> interpolationsList =
symResolver.getListOfInterpolations(regExpTemplateLiteral.reDisjunction.sequenceList);
interpolationsList.forEach(interpolation -> analyzeExpr(interpolation, data));
}
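    /**
     * Logs a usage-of-deprecated-construct warning, prefixing the construct name with its package ID unless the
     * symbol belongs to the dot package.
     */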
private void logDeprecatedWaring(String deprecatedConstruct, BSymbol symbol, Location pos) {
if (!Names.DOT.equals(symbol.pkgID.name)) {
deprecatedConstruct = symbol.pkgID + ":" + deprecatedConstruct;
}
dlog.warning(pos, DiagnosticWarningCode.USAGE_OF_DEPRECATED_CONSTRUCT, deprecatedConstruct);
}
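    /**
     * Returns true if a non-error type intersection exists between the expression type and the test type, or if
     * the expression type is {@code any} and the test type is {@code readonly}.
     */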
private boolean intersectionExists(BLangExpression expression, BType testType, AnalyzerData data,
Location intersectionPos) {
BType expressionType = expression.getBType();
BType intersectionType = types.getTypeIntersection(
Types.IntersectionContext.typeTestIntersectionExistenceContext(intersectionPos),
expressionType, testType, data.env);
return (intersectionType != symTable.semanticError) ||
(expressionType.tag == TypeTags.ANY && testType.tag == TypeTags.READONLY);
}
@Override
public void visit(BLangInferredTypedescDefaultNode inferTypedescExpr, AnalyzerData data) {
/* Ignore */
}
private <E extends BLangExpression> void analyzeExpr(E node, AnalyzerData data) {
if (node == null) {
return;
}
SymbolEnv prevEnv = data.env;
BLangNode parent = data.parent;
node.parent = data.parent;
data.parent = node;
node.accept(this, data);
data.parent = parent;
checkAccess(node, data);
checkExpressionValidity(node, data);
data.env = prevEnv;
}
private <E extends BLangExpression> void checkExpressionValidity(E exprNode, AnalyzerData data) {
if (exprNode.getKind() == NodeKind.GROUP_EXPR ||
!types.isNeverTypeOrStructureTypeWithARequiredNeverMember(exprNode.getBType())) {
return;
}
if (!checkExpressionInValidParent(exprNode.parent, data)) {
dlog.error(exprNode.pos, DiagnosticErrorCode.EXPRESSION_OF_NEVER_TYPE_NOT_ALLOWED);
}
}
private boolean checkExpressionInValidParent(BLangNode currentParent, AnalyzerData data) {
if (currentParent == null) {
return false;
}
if (currentParent.getKind() == NodeKind.GROUP_EXPR) {
return checkExpressionInValidParent(currentParent.parent, data);
}
return currentParent.getKind() == NodeKind.EXPRESSION_STATEMENT ||
(currentParent.getKind() == NodeKind.VARIABLE &&
((BLangSimpleVariable) data.parent).typeNode.getBType().tag == TypeTags.FUTURE)
|| currentParent.getKind() == NodeKind.TRAP_EXPR;
}
@Override
public void visit(BLangConstant constant, AnalyzerData data) {
analyzeTypeNode(constant.typeNode, data);
analyzeNode(constant.expr, data);
analyzeExportableTypeRef(constant.symbol, constant.symbol.type.tsymbol, false, constant.pos);
constant.annAttachments.forEach(annotationAttachment -> analyzeNode(annotationAttachment, data));
}
/**
* This method checks for private symbols being accessed or used outside of package and|or private symbols being
* used in public fields of objects/records and will fail those occurrences.
*
* @param node expression node to analyze
* @param data data used to analyze the node
*/
private <E extends BLangExpression> void checkAccess(E node, AnalyzerData data) {
if (node.getBType() != null) {
checkAccessSymbol(node.getBType().tsymbol, data.env.enclPkg.symbol.pkgID, node.pos);
}
if (node.getKind() == NodeKind.INVOCATION) {
BLangInvocation bLangInvocation = (BLangInvocation) node;
checkAccessSymbol(bLangInvocation.symbol, data.env.enclPkg.symbol.pkgID, bLangInvocation.pos);
}
}
private void checkAccessSymbol(BSymbol symbol, PackageID pkgID, Location position) {
if (symbol == null) {
return;
}
if (!pkgID.equals(symbol.pkgID) && !Symbols.isPublic(symbol)) {
dlog.error(position, DiagnosticErrorCode.ATTEMPT_REFER_NON_ACCESSIBLE_SYMBOL, symbol.name);
}
}
private <E extends BLangExpression> void analyzeExprs(List<E> nodeList, AnalyzerData data) {
for (int i = 0; i < nodeList.size(); i++) {
analyzeExpr(nodeList.get(i), data);
}
}
private void initNewWorkerActionSystem(AnalyzerData data) {
data.workerActionSystemStack.push(new WorkerActionSystem());
}
private void finalizeCurrentWorkerActionSystem(AnalyzerData data) {
WorkerActionSystem was = data.workerActionSystemStack.pop();
if (!was.hasErrors) {
this.validateWorkerInteractions(was, data);
}
}
private static boolean isWorkerSend(BLangNode action) {
return action.getKind() == NodeKind.WORKER_SEND;
}
private static boolean isWorkerSyncSend(BLangNode action) {
return action.getKind() == NodeKind.WORKER_SYNC_SEND;
}
private static boolean isWaitAction(BLangNode action) {
return action.getKind() == NodeKind.WAIT_EXPR;
}
private String extractWorkerId(BLangNode action) {
if (isWorkerSend(action)) {
return ((BLangWorkerSend) action).workerIdentifier.value;
} else if (isWorkerSyncSend(action)) {
return ((BLangWorkerSyncSendExpr) action).workerIdentifier.value;
} else {
return ((BLangWorkerReceive) action).workerIdentifier.value;
}
}
private void validateWorkerInteractions(WorkerActionSystem workerActionSystem, AnalyzerData data) {
if (!validateWorkerInteractionsAfterWaitAction(workerActionSystem)) {
return;
}
BLangNode currentAction;
boolean systemRunning;
data.workerSystemMovementSequence = 0;
int systemIterationCount = 0;
int prevWorkerSystemMovementSequence = data.workerSystemMovementSequence;
do {
systemRunning = false;
systemIterationCount++;
for (WorkerActionStateMachine worker : workerActionSystem.finshedWorkers) {
if (worker.done()) {
continue;
}
currentAction = worker.currentAction();
if (isWaitAction(currentAction)) {
handleWaitAction(workerActionSystem, currentAction, worker, data);
systemRunning = true;
continue;
}
if (!isWorkerSend(currentAction) && !isWorkerSyncSend(currentAction)) {
continue;
}
WorkerActionStateMachine otherSM = workerActionSystem.find(this.extractWorkerId(currentAction));
if (otherSM.done()) {
continue;
}
if (isWaitAction(otherSM.currentAction())) {
systemRunning = false;
continue;
}
if (!otherSM.currentIsReceive(worker.workerId)) {
continue;
}
BLangWorkerReceive receive = (BLangWorkerReceive) otherSM.currentAction();
if (isWorkerSyncSend(currentAction)) {
this.validateWorkerActionParameters((BLangWorkerSyncSendExpr) currentAction, receive);
} else {
this.validateWorkerActionParameters((BLangWorkerSend) currentAction, receive);
}
otherSM.next();
data.workerSystemMovementSequence++;
worker.next();
data.workerSystemMovementSequence++;
systemRunning = true;
String channelName = generateChannelName(worker.workerId, otherSM.workerId);
otherSM.node.sendsToThis.add(channelName);
worker.node.sendsToThis.add(channelName);
}
if (systemIterationCount > workerActionSystem.finshedWorkers.size()) {
systemIterationCount = 0;
if (prevWorkerSystemMovementSequence == data.workerSystemMovementSequence) {
systemRunning = false;
}
prevWorkerSystemMovementSequence = data.workerSystemMovementSequence;
}
} while (systemRunning);
if (!workerActionSystem.everyoneDone()) {
this.reportInvalidWorkerInteractionDiagnostics(workerActionSystem);
}
}
private boolean validateWorkerInteractionsAfterWaitAction(WorkerActionSystem workerActionSystem) {
boolean isValid = true;
for (WorkerActionStateMachine worker : workerActionSystem.finshedWorkers) {
Set<String> waitingOnWorkerSet = new HashSet<>();
for (BLangNode action : worker.actions) {
if (isWaitAction(action)) {
if (action instanceof BLangWaitForAllExpr) {
BLangWaitForAllExpr waitForAllExpr = (BLangWaitForAllExpr) action;
for (BLangWaitForAllExpr.BLangWaitKeyValue keyValuePair : waitForAllExpr.keyValuePairs) {
BSymbol workerSymbol = getWorkerSymbol(keyValuePair);
if (workerSymbol != null) {
waitingOnWorkerSet.add(workerSymbol.name.value);
}
}
} else {
BLangWaitExpr wait = (BLangWaitExpr) action;
for (String workerName : getWorkerNameList(wait.exprList.get(0),
workerActionSystem.getActionEnvironment(wait))) {
waitingOnWorkerSet.add(workerName);
}
}
} else if (isWorkerSend(action)) {
BLangWorkerSend send = (BLangWorkerSend) action;
if (waitingOnWorkerSet.contains(send.workerIdentifier.value)) {
dlog.error(action.pos, DiagnosticErrorCode.WORKER_INTERACTION_AFTER_WAIT_ACTION, action);
isValid = false;
}
} else if (isWorkerSyncSend(action)) {
BLangWorkerSyncSendExpr syncSend = (BLangWorkerSyncSendExpr) action;
if (waitingOnWorkerSet.contains(syncSend.workerIdentifier.value)) {
dlog.error(action.pos, DiagnosticErrorCode.WORKER_INTERACTION_AFTER_WAIT_ACTION, action);
isValid = false;
}
} else if (action.getKind() == NodeKind.WORKER_RECEIVE) {
BLangWorkerReceive receive = (BLangWorkerReceive) action;
if (waitingOnWorkerSet.contains(receive.workerIdentifier.value)) {
dlog.error(action.pos, DiagnosticErrorCode.WORKER_INTERACTION_AFTER_WAIT_ACTION, action);
isValid = false;
}
}
}
}
return isValid;
}
private void handleWaitAction(WorkerActionSystem workerActionSystem, BLangNode currentAction,
WorkerActionStateMachine worker, AnalyzerData data) {
if (currentAction instanceof BLangWaitForAllExpr) {
boolean allWorkersAreDone = true;
BLangWaitForAllExpr waitForAllExpr = (BLangWaitForAllExpr) currentAction;
for (BLangWaitForAllExpr.BLangWaitKeyValue keyValuePair : waitForAllExpr.keyValuePairs) {
BSymbol workerSymbol = getWorkerSymbol(keyValuePair);
if (isWorkerSymbol(workerSymbol)) {
Name workerName = workerSymbol.name;
if (isWorkerFromFunction(workerActionSystem.getActionEnvironment(currentAction), workerName)) {
WorkerActionStateMachine otherSM = workerActionSystem.find(workerName.value);
allWorkersAreDone = allWorkersAreDone && otherSM.done();
}
}
}
if (allWorkersAreDone) {
worker.next();
data.workerSystemMovementSequence++;
}
} else {
BLangWaitExpr wait = (BLangWaitExpr) currentAction;
List<String> workerNameList = getWorkerNameList(wait.exprList.get(0),
workerActionSystem.getActionEnvironment(currentAction));
if (workerNameList.isEmpty()) {
worker.next();
data.workerSystemMovementSequence++;
}
for (String workerName : workerNameList) {
var otherSM = workerActionSystem.find(workerName);
if (otherSM.done()) {
worker.next();
data.workerSystemMovementSequence++;
break;
}
}
}
}
private BSymbol getWorkerSymbol(BLangWaitForAllExpr.BLangWaitKeyValue keyValuePair) {
BLangExpression value = keyValuePair.getValue();
if (value != null && value.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
return ((BLangSimpleVarRef) value).symbol;
} else if (keyValuePair.keyExpr != null && keyValuePair.keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
return ((BLangSimpleVarRef) keyValuePair.keyExpr).symbol;
}
return null;
}
private List<String> getWorkerNameList(BLangExpression expr, SymbolEnv functionEnv) {
ArrayList<String> workerNames = new ArrayList<>();
populateWorkerNameList(expr, workerNames, functionEnv);
return workerNames;
}
private void populateWorkerNameList(BLangExpression expr, ArrayList<String> workerNames, SymbolEnv functionEnv) {
if (expr.getKind() == NodeKind.BINARY_EXPR) {
BLangBinaryExpr binaryExpr = (BLangBinaryExpr) expr;
populateWorkerNameList(binaryExpr.lhsExpr, workerNames, functionEnv);
populateWorkerNameList(binaryExpr.rhsExpr, workerNames, functionEnv);
} else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangSimpleVarRef varRef = (BLangSimpleVarRef) expr;
if (isWorkerSymbol(varRef.symbol) && isWorkerFromFunction(functionEnv, varRef.symbol.name)) {
workerNames.add(varRef.variableName.value);
}
}
}
private boolean isWorkerFromFunction(SymbolEnv functionEnv, Name workerName) {
if (functionEnv == null) {
return false;
}
if (functionEnv.scope.lookup(workerName).symbol != null) {
return true;
}
if (functionEnv.enclInvokable != null) {
Set<Flag> flagSet = functionEnv.enclInvokable.flagSet;
if (flagSet.contains(Flag.LAMBDA) && !flagSet.contains(Flag.WORKER)) {
return false;
}
}
return isWorkerFromFunction(functionEnv.enclEnv, workerName);
}
private boolean isWorkerSymbol(BSymbol symbol) {
return symbol != null && (symbol.flags & Flags.WORKER) == Flags.WORKER;
}
private void reportInvalidWorkerInteractionDiagnostics(WorkerActionSystem workerActionSystem) {
this.dlog.error(workerActionSystem.getRootPosition(), DiagnosticErrorCode.INVALID_WORKER_INTERACTION,
workerActionSystem.toString());
}
private void validateWorkerActionParameters(BLangWorkerSend send, BLangWorkerReceive receive) {
types.checkType(receive, send.getBType(), receive.getBType());
addImplicitCast(send.getBType(), receive);
NodeKind kind = receive.parent.getKind();
if (kind == NodeKind.TRAP_EXPR || kind == NodeKind.CHECK_EXPR || kind == NodeKind.CHECK_PANIC_EXPR ||
kind == NodeKind.FAIL) {
typeChecker.checkExpr((BLangExpression) receive.parent, receive.env);
}
receive.sendExpression = send.expr;
}
private void validateWorkerActionParameters(BLangWorkerSyncSendExpr send, BLangWorkerReceive receive) {
send.receive = receive;
NodeKind parentNodeKind = send.parent.getKind();
if (parentNodeKind == NodeKind.VARIABLE) {
BLangSimpleVariable variable = (BLangSimpleVariable) send.parent;
if (variable.isDeclaredWithVar) {
variable.setBType(variable.symbol.type = send.expectedType = receive.matchingSendsError);
}
} else if (parentNodeKind == NodeKind.ASSIGNMENT) {
BLangAssignment assignment = (BLangAssignment) send.parent;
if (assignment.varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BSymbol varSymbol = ((BLangSimpleVarRef) assignment.varRef).symbol;
if (varSymbol != null) {
send.expectedType = varSymbol.type;
}
}
}
if (receive.matchingSendsError != symTable.nilType && parentNodeKind == NodeKind.EXPRESSION_STATEMENT) {
dlog.error(send.pos, DiagnosticErrorCode.ASSIGNMENT_REQUIRED, send.workerSymbol);
} else {
types.checkType(send.pos, receive.matchingSendsError, send.expectedType,
DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
types.checkType(receive, send.getBType(), receive.getBType());
addImplicitCast(send.getBType(), receive);
NodeKind kind = receive.parent.getKind();
if (kind == NodeKind.TRAP_EXPR || kind == NodeKind.CHECK_EXPR || kind == NodeKind.CHECK_PANIC_EXPR) {
typeChecker.checkExpr((BLangExpression) receive.parent, receive.env);
}
receive.sendExpression = send;
}
private void addImplicitCast(BType actualType, BLangWorkerReceive receive) {
if (receive.getBType() != null && receive.getBType() != symTable.semanticError) {
types.setImplicitCastExpr(receive, actualType, receive.getBType());
receive.setBType(actualType);
}
}
private boolean checkNextBreakValidityInTransaction(AnalyzerData data) {
return !data.loopWithinTransactionCheckStack.peek() && data.transactionCount > 0 && data.withinTransactionScope;
}
private boolean checkReturnValidityInTransaction(AnalyzerData data) {
return !data.returnWithinTransactionCheckStack.peek() && data.transactionCount > 0
&& data.withinTransactionScope;
}
private void validateModuleInitFunction(BLangFunction funcNode) {
if (funcNode.attachedFunction || !Names.USER_DEFINED_INIT_SUFFIX.value.equals(funcNode.name.value)) {
return;
}
if (Symbols.isPublic(funcNode.symbol)) {
this.dlog.error(funcNode.pos, DiagnosticErrorCode.MODULE_INIT_CANNOT_BE_PUBLIC);
}
if (!funcNode.requiredParams.isEmpty() || funcNode.restParam != null) {
this.dlog.error(funcNode.pos, DiagnosticErrorCode.MODULE_INIT_CANNOT_HAVE_PARAMS);
}
types.validateErrorOrNilReturn(funcNode, DiagnosticErrorCode.MODULE_INIT_RETURN_SHOULD_BE_ERROR_OR_NIL);
}
private BType getErrorTypes(BType bType) {
if (bType == null) {
return symTable.semanticError;
}
BType errorType = symTable.semanticError;
int tag = bType.tag;
if (tag == TypeTags.TYPEREFDESC) {
return getErrorTypes(Types.getReferredType(bType));
}
if (tag == TypeTags.ERROR) {
errorType = bType;
} else if (tag == TypeTags.READONLY) {
errorType = symTable.errorType;
} else if (tag == TypeTags.UNION) {
LinkedHashSet<BType> errTypes = new LinkedHashSet<>();
Set<BType> memTypes = ((BUnionType) bType).getMemberTypes();
for (BType memType : memTypes) {
BType memErrType = getErrorTypes(memType);
if (memErrType != symTable.semanticError) {
errTypes.add(memErrType);
}
}
if (!errTypes.isEmpty()) {
errorType = errTypes.size() == 1 ? errTypes.iterator().next() : BUnionType.create(null, errTypes);
}
}
return errorType;
}
/**
* This class contains the state machines for a set of workers.
*/
private static class WorkerActionSystem {
public List<WorkerActionStateMachine> finshedWorkers = new ArrayList<>();
private Stack<WorkerActionStateMachine> workerActionStateMachines = new Stack<>();
private Map<BLangNode, SymbolEnv> workerInteractionEnvironments = new IdentityHashMap<>();
private boolean hasErrors = false;
public void startWorkerActionStateMachine(String workerId, Location pos, BLangFunction node) {
workerActionStateMachines.push(new WorkerActionStateMachine(pos, workerId, node));
}
public void endWorkerActionStateMachine() {
finshedWorkers.add(workerActionStateMachines.pop());
}
public void addWorkerAction(BLangNode action) {
this.workerActionStateMachines.peek().actions.add(action);
}
public WorkerActionStateMachine find(String workerId) {
for (WorkerActionStateMachine worker : this.finshedWorkers) {
if (worker.workerId.equals(workerId)) {
return worker;
}
}
throw new AssertionError("Reference to non existing worker " + workerId);
}
public boolean everyoneDone() {
return this.finshedWorkers.stream().allMatch(WorkerActionStateMachine::done);
}
public Location getRootPosition() {
return this.finshedWorkers.iterator().next().pos;
}
@Override
public String toString() {
return this.finshedWorkers.toString();
}
public String currentWorkerId() {
return workerActionStateMachines.peek().workerId;
}
public void addWorkerAction(BLangNode action, SymbolEnv env) {
addWorkerAction(action);
this.workerInteractionEnvironments.put(action, env);
}
private SymbolEnv getActionEnvironment(BLangNode currentAction) {
return workerInteractionEnvironments.get(currentAction);
}
}
/**
* This class represents a state machine to maintain the state of the send/receive
* actions of a worker.
*/
private static class WorkerActionStateMachine {
private static final String WORKER_SM_FINISHED = "FINISHED";
public int currentState;
public List<BLangNode> actions = new ArrayList<>();
public Location pos;
public String workerId;
public BLangFunction node;
public WorkerActionStateMachine(Location pos, String workerId, BLangFunction node) {
this.pos = pos;
this.workerId = workerId;
this.node = node;
}
public boolean done() {
return this.actions.size() == this.currentState;
}
public BLangNode currentAction() {
return this.actions.get(this.currentState);
}
public boolean currentIsReceive(String sourceWorkerId) {
if (this.done()) {
return false;
}
BLangNode action = this.currentAction();
return !isWorkerSend(action) && !isWorkerSyncSend(action) && !isWaitAction(action)
&& ((BLangWorkerReceive) action).workerIdentifier.value.equals(sourceWorkerId);
}
public void next() {
this.currentState++;
}
@Override
public String toString() {
if (this.done()) {
return WORKER_SM_FINISHED;
} else {
BLangNode action = this.currentAction();
if (isWorkerSend(action)) {
return ((BLangWorkerSend) action).toActionString();
} else if (isWorkerSyncSend(action)) {
return ((BLangWorkerSyncSendExpr) action).toActionString();
} else if (isWaitAction(action)) {
return action.toString();
} else {
return ((BLangWorkerReceive) action).toActionString();
}
}
}
}
public static String generateChannelName(String source, String target) {
return source + "->" + target;
}
private BLangNode getEnclosingClass(SymbolEnv env) {
BLangNode node = env.node;
while (node.getKind() != NodeKind.CLASS_DEFN) {
env = env.enclEnv;
node = env.node;
}
return node;
}
private void validateInvocationInMatchGuard(BLangInvocation invocation) {
BLangExpression matchedExpr = getMatchedExprIfCalledInMatchGuard(invocation);
if (matchedExpr == null) {
return;
}
BType matchedExprType = matchedExpr.getBType();
if (types.isInherentlyImmutableType(matchedExprType) ||
Symbols.isFlagOn(matchedExprType.flags, Flags.READONLY)) {
return;
}
BSymbol invocationSymbol = invocation.symbol;
if (invocationSymbol == null) {
BLangNode parent = invocation.parent;
if (parent == null || parent.getKind() != NodeKind.TYPE_INIT_EXPR) {
return;
}
BLangTypeInit newExpr = (BLangTypeInit) parent;
if (newExpr.getBType().tag != TypeTags.STREAM) {
return;
}
List<BLangExpression> argsExpr = newExpr.argsExpr;
if (argsExpr.isEmpty()) {
return;
}
BLangExpression streamImplementorExpr = argsExpr.get(0);
BType type = streamImplementorExpr.getBType();
if (!types.isInherentlyImmutableType(type) && !Symbols.isFlagOn(type.flags, Flags.READONLY)) {
dlog.error(streamImplementorExpr.pos,
DiagnosticErrorCode.INVALID_CALL_WITH_MUTABLE_ARGS_IN_MATCH_GUARD);
}
return;
}
long flags = invocationSymbol.flags;
boolean methodCall = Symbols.isFlagOn(flags, Flags.ATTACHED);
boolean callsNonIsolatedFunction = !Symbols.isFlagOn(flags, Flags.ISOLATED) ||
(methodCall && !Symbols.isFlagOn(invocationSymbol.owner.flags, Flags.ISOLATED));
if (callsNonIsolatedFunction) {
dlog.error(invocation.pos, DiagnosticErrorCode.INVALID_NON_ISOLATED_CALL_IN_MATCH_GUARD);
}
List<BLangExpression> args = new ArrayList<>(invocation.requiredArgs);
args.addAll(invocation.restArgs);
for (BLangExpression arg : args) {
BType type = arg.getBType();
if (type != symTable.semanticError &&
!types.isInherentlyImmutableType(type) &&
!Symbols.isFlagOn(type.flags, Flags.READONLY)) {
dlog.error(arg.pos, DiagnosticErrorCode.INVALID_CALL_WITH_MUTABLE_ARGS_IN_MATCH_GUARD);
}
}
}
private BLangExpression getMatchedExprIfCalledInMatchGuard(BLangInvocation invocation) {
BLangNode prevParent = invocation;
BLangNode parent = invocation.parent;
boolean encounteredMatchGuard = false;
while (parent != null) {
NodeKind parentKind = parent.getKind();
switch (parentKind) {
case LAMBDA:
case FUNCTION:
case RESOURCE_FUNC:
return null;
case MATCH_CLAUSE:
if (encounteredMatchGuard) {
return ((BLangMatchStatement) parent.parent).expr;
}
return null;
case MATCH_GUARD:
encounteredMatchGuard = true;
break;
case INVOCATION:
BLangInvocation parentInvocation = (BLangInvocation) parent;
if (parentInvocation.langLibInvocation || prevParent != parentInvocation.expr) {
return null;
}
}
prevParent = parent;
parent = parent.parent;
}
return null;
}
private enum DefaultValueState {
NOT_IN_DEFAULT_VALUE,
RECORD_FIELD_DEFAULT,
OBJECT_FIELD_INITIALIZER,
FUNCTION_IN_DEFAULT_VALUE
}
/**
* @since 2.0.0
*/
public static class AnalyzerData {
SymbolEnv env;
BLangNode parent;
int loopCount;
boolean loopAlterNotAllowed;
boolean inInternallyDefinedBlockStmt;
int workerSystemMovementSequence;
Stack<WorkerActionSystem> workerActionSystemStack = new Stack<>();
Map<BSymbol, Set<BLangNode>> workerReferences = new HashMap<>();
int transactionCount;
boolean withinTransactionScope;
int commitCount;
int rollbackCount;
boolean commitRollbackAllowed;
int commitCountWithinBlock;
int rollbackCountWithinBlock;
Stack<Boolean> loopWithinTransactionCheckStack = new Stack<>();
Stack<Boolean> returnWithinTransactionCheckStack = new Stack<>();
Stack<Boolean> transactionalFuncCheckStack = new Stack<>();
boolean withinLockBlock;
boolean failureHandled;
boolean failVisited;
boolean queryToTableWithKey;
boolean withinQuery;
boolean queryToMap;
Stack<LinkedHashSet<BType>> returnTypes = new Stack<>();
Stack<LinkedHashSet<BType>> errorTypes = new Stack<>();
DefaultValueState defaultValueState = DefaultValueState.NOT_IN_DEFAULT_VALUE;
}
} |
Remove the extra blank lines. | public void assertParse() throws NoSuchFieldException, IllegalAccessException {
SQLParserExecutor sqlParserExecutor = mock(SQLParserExecutor.class);
when(sqlParserExecutor.parse(SQL)).thenReturn(mock(ParseTree.class));
LoadingCache<String, ParseTree> parseTreeCache = CacheBuilder.newBuilder().softValues().initialCapacity(128)
.maximumSize(1024).concurrencyLevel(4).build(new CacheLoader<String, ParseTree>() {
@ParametersAreNonnullByDefault
@Override
public ParseTree load(final String sql) {
return sqlParserExecutor.parse(sql);
}
});
SQLParserEngine sqlParserEngine = new SQLParserEngine("H2");
Field sqlParserExecutorFiled = sqlParserEngine.getClass().getDeclaredField("sqlParserExecutor");
Field parseTreeCacheField = sqlParserEngine.getClass().getDeclaredField("parseTreeCache");
Field modifiersField = Field.class.getDeclaredField("modifiers");
modifiersField.setAccessible(true);
modifiersField.setInt(sqlParserExecutorFiled, sqlParserExecutorFiled.getModifiers() & ~Modifier.FINAL);
Field modifiersField2 = Field.class.getDeclaredField("modifiers");
modifiersField2.setAccessible(true);
modifiersField2.setInt(parseTreeCacheField, sqlParserExecutorFiled.getModifiers() & ~Modifier.FINAL);
sqlParserExecutorFiled.setAccessible(true);
parseTreeCacheField.setAccessible(true);
sqlParserExecutorFiled.set(sqlParserEngine, sqlParserExecutor);
parseTreeCacheField.set(sqlParserEngine, parseTreeCache);
sqlParserEngine.parse(SQL, true);
verify(sqlParserExecutor, times(1)).parse(SQL);
sqlParserEngine.parse(SQL, true);
verify(sqlParserExecutor, times(1)).parse(SQL);
sqlParserEngine.parse(SQL, false);
verify(sqlParserExecutor, times(2)).parse(SQL);
} | public void assertParse() throws NoSuchFieldException, IllegalAccessException {
SQLParserExecutor sqlParserExecutor = mock(SQLParserExecutor.class);
when(sqlParserExecutor.parse(SQL)).thenReturn(mock(ParseTree.class));
SQLParserEngine sqlParserEngine = new SQLParserEngine("H2");
Field sqlParserExecutorFiled = sqlParserEngine.getClass().getDeclaredField("sqlParserExecutor");
Field modifiersField = Field.class.getDeclaredField("modifiers");
modifiersField.setAccessible(true);
modifiersField.setInt(sqlParserExecutorFiled, sqlParserExecutorFiled.getModifiers() & ~Modifier.FINAL);
Field modifiersField2 = Field.class.getDeclaredField("modifiers");
modifiersField2.setAccessible(true);
Field parseTreeCacheField = sqlParserEngine.getClass().getDeclaredField("parseTreeCache");
modifiersField2.setInt(parseTreeCacheField, sqlParserExecutorFiled.getModifiers() & ~Modifier.FINAL);
sqlParserExecutorFiled.setAccessible(true);
parseTreeCacheField.setAccessible(true);
sqlParserExecutorFiled.set(sqlParserEngine, sqlParserExecutor);
LoadingCache<String, ParseTree> parseTreeCache = CacheBuilder.newBuilder().softValues().initialCapacity(128)
.maximumSize(1024).concurrencyLevel(4).build(new CacheLoader<String, ParseTree>() {
@ParametersAreNonnullByDefault
@Override
public ParseTree load(final String sql) {
return sqlParserExecutor.parse(sql);
}
});
parseTreeCacheField.set(sqlParserEngine, parseTreeCache);
sqlParserEngine.parse(SQL, true);
verify(sqlParserExecutor, times(1)).parse(SQL);
sqlParserEngine.parse(SQL, true);
verify(sqlParserExecutor, times(1)).parse(SQL);
sqlParserEngine.parse(SQL, false);
verify(sqlParserExecutor, times(2)).parse(SQL);
} | class SQLParserEngineTest {
private static final String SQL = "SELECT COUNT(*) FROM user";
@Test
} | class SQLParserEngineTest {
private static final String SQL = "SELECT COUNT(*) FROM user";
@Test
} |
|
The most risky bug in this code is: inappropriate use of the `contains` method for the `reason` comparison, which can lead to unexpected behavior when the reason string is a substring of any of the entries in `MV_NO_AUTOMATIC_ACTIVE_REASONS`. You can modify the code like this:
```java
// Change this code block:
if (MV_NO_AUTOMATIC_ACTIVE_REASONS.stream().anyMatch(x -> x.contains(reason))) {
    return;
}
// To this:
if (MV_NO_AUTOMATIC_ACTIVE_REASONS.contains(reason)) {
    return;
}
``` | public static void tryToActivate(MaterializedView mv, boolean checkGracePeriod) {
String reason = mv.getInactiveReason();
if (mv.isActive() || AlterJobMgr.MANUAL_INACTIVE_MV_REASON.equalsIgnoreCase(reason)) {
return;
}
if (MV_NO_AUTOMATIC_ACTIVE_REASONS.stream().anyMatch(x -> x.contains(reason))) {
return;
}
long dbId = mv.getDbId();
Optional<String> dbName = GlobalStateMgr.getCurrentState().mayGetDb(dbId).map(Database::getFullName);
if (!dbName.isPresent()) {
LOG.warn("[MVActiveChecker] cannot activate MV {} since database {} not found", mv.getName(), dbId);
return;
}
MvActiveInfo activeInfo = MV_ACTIVE_INFO.get(mv.getMvId());
if (checkGracePeriod && activeInfo != null && activeInfo.isInGracePeriod()) {
LOG.warn("[MVActiveChecker] skip active MV {} since it's in grace-period", mv);
return;
}
boolean activeOk = false;
String mvFullName =
new TableName(InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME, dbName.get(), mv.getName()).toString();
String sql = String.format("ALTER MATERIALIZED VIEW %s active", mvFullName);
LOG.info("[MVActiveChecker] Start to activate MV {} because of its inactive reason: {}", mvFullName, reason);
try {
ConnectContext connect = StatisticUtils.buildConnectContext();
connect.setStatisticsContext(false);
connect.setDatabase(dbName.get());
connect.executeSql(sql);
if (mv.isActive()) {
activeOk = true;
LOG.info("[MVActiveChecker] activate MV {} successfully", mvFullName);
} else {
LOG.warn("[MVActiveChecker] activate MV {} failed", mvFullName);
}
} catch (Exception e) {
LOG.warn("[MVActiveChecker] activate MV {} failed", mvFullName, e);
} finally {
ConnectContext.remove();
}
if (activeOk) {
MV_ACTIVE_INFO.remove(mv.getMvId());
} else {
if (activeInfo != null) {
activeInfo.next();
} else {
MV_ACTIVE_INFO.put(mv.getMvId(), MvActiveInfo.firstFailure());
}
}
} | Optional<String> dbName = GlobalStateMgr.getCurrentState().mayGetDb(dbId).map(Database::getFullName); | public static void tryToActivate(MaterializedView mv, boolean checkGracePeriod) {
String reason = mv.getInactiveReason();
if (mv.isActive() || AlterJobMgr.MANUAL_INACTIVE_MV_REASON.equalsIgnoreCase(reason)) {
return;
}
if (MV_NO_AUTOMATIC_ACTIVE_REASONS.stream().anyMatch(x -> x.contains(reason))) {
return;
}
long dbId = mv.getDbId();
Optional<String> dbName = GlobalStateMgr.getCurrentState().mayGetDb(dbId).map(Database::getFullName);
if (!dbName.isPresent()) {
LOG.warn("[MVActiveChecker] cannot activate MV {} since database {} not found", mv.getName(), dbId);
return;
}
MvActiveInfo activeInfo = MV_ACTIVE_INFO.get(mv.getMvId());
if (checkGracePeriod && activeInfo != null && activeInfo.isInGracePeriod()) {
LOG.warn("[MVActiveChecker] skip active MV {} since it's in grace-period", mv);
return;
}
boolean activeOk = false;
String mvFullName =
new TableName(InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME, dbName.get(), mv.getName()).toString();
String sql = String.format("ALTER MATERIALIZED VIEW %s active", mvFullName);
LOG.info("[MVActiveChecker] Start to activate MV {} because of its inactive reason: {}", mvFullName, reason);
try {
ConnectContext connect = StatisticUtils.buildConnectContext();
connect.setStatisticsContext(false);
connect.setDatabase(dbName.get());
connect.executeSql(sql);
if (mv.isActive()) {
activeOk = true;
LOG.info("[MVActiveChecker] activate MV {} successfully", mvFullName);
} else {
LOG.warn("[MVActiveChecker] activate MV {} failed", mvFullName);
}
} catch (Exception e) {
LOG.warn("[MVActiveChecker] activate MV {} failed", mvFullName, e);
} finally {
ConnectContext.remove();
}
if (activeOk) {
MV_ACTIVE_INFO.remove(mv.getMvId());
} else {
if (activeInfo != null) {
activeInfo.next();
} else {
MV_ACTIVE_INFO.put(mv.getMvId(), MvActiveInfo.firstFailure());
}
}
} | class MVActiveChecker extends FrontendDaemon {
private static final Logger LOG = LogManager.getLogger(MVActiveChecker.class);
private static final Map<MvId, MvActiveInfo> MV_ACTIVE_INFO = Maps.newConcurrentMap();
public MVActiveChecker() {
super("MVActiveChecker", Config.mv_active_checker_interval_seconds * 1000);
}
public static final String MV_BACKUP_INACTIVE_REASON = "it's in backup and will be activated after restore if possible";
private static final Set<String> MV_NO_AUTOMATIC_ACTIVE_REASONS = ImmutableSet.of(MV_BACKUP_INACTIVE_REASON);
@Override
protected void runAfterCatalogReady() {
setInterval(Config.mv_active_checker_interval_seconds * 1000L);
if (!Config.enable_mv_automatic_active_check || FeConstants.runningUnitTest) {
return;
}
try {
process();
} catch (Throwable e) {
LOG.warn("Failed to process one round of MVActiveChecker", e);
}
}
@VisibleForTesting
public void runForTest(boolean clearGrace) {
if (clearGrace) {
clearGracePeriod();
}
process();
}
@VisibleForTesting
private void clearGracePeriod() {
MV_ACTIVE_INFO.clear();
}
private void process() {
Collection<Database> dbs = GlobalStateMgr.getCurrentState().getIdToDb().values();
for (Database db : CollectionUtils.emptyIfNull(dbs)) {
for (Table table : CollectionUtils.emptyIfNull(db.getTables())) {
if (table.isMaterializedView()) {
MaterializedView mv = (MaterializedView) table;
if (!mv.isActive()) {
tryToActivate(mv, true);
}
}
}
}
}
public static void tryToActivate(MaterializedView mv) {
tryToActivate(mv, false);
}
/**
* @param mv
* @param checkGracePeriod whether check the grace period, usually background active would check it, but foreground
* job doesn't
*/
public static class MvActiveInfo {
public static final long MAX_BACKOFF_MINUTES = 60;
private static final long BACKOFF_BASE = 2;
private static final long MAX_BACKOFF_TIMES = (long) (Math.log(MAX_BACKOFF_MINUTES) / Math.log(BACKOFF_BASE));
private LocalDateTime nextActive;
private int failureTimes = 0;
public static MvActiveInfo firstFailure() {
MvActiveInfo info = new MvActiveInfo();
info.next();
return info;
}
/**
* If in grace period, it should not activate the mv
*/
public boolean isInGracePeriod() {
LocalDateTime now = LocalDateTime.now(TimeUtils.getSystemTimeZone().toZoneId());
return now.isBefore(nextActive);
}
public LocalDateTime getNextActive() {
return nextActive;
}
public void next() {
LocalDateTime lastActive = LocalDateTime.now(TimeUtils.getSystemTimeZone().toZoneId());
this.failureTimes++;
this.nextActive = lastActive.plus(failureBackoff(failureTimes));
}
private Duration failureBackoff(int failureTimes) {
if (failureTimes >= MAX_BACKOFF_TIMES) {
return Duration.ofMinutes(MAX_BACKOFF_MINUTES);
}
long expBackoff = (long) Math.pow(BACKOFF_BASE, failureTimes);
return Duration.ofMinutes(expBackoff);
}
}
} | class MVActiveChecker extends FrontendDaemon {
private static final Logger LOG = LogManager.getLogger(MVActiveChecker.class);
private static final Map<MvId, MvActiveInfo> MV_ACTIVE_INFO = Maps.newConcurrentMap();
public MVActiveChecker() {
super("MVActiveChecker", Config.mv_active_checker_interval_seconds * 1000);
}
public static final String MV_BACKUP_INACTIVE_REASON = "it's in backup and will be activated after restore if possible";
private static final Set<String> MV_NO_AUTOMATIC_ACTIVE_REASONS = ImmutableSet.of(MV_BACKUP_INACTIVE_REASON);
@Override
protected void runAfterCatalogReady() {
setInterval(Config.mv_active_checker_interval_seconds * 1000L);
if (!Config.enable_mv_automatic_active_check || FeConstants.runningUnitTest) {
return;
}
try {
process();
} catch (Throwable e) {
LOG.warn("Failed to process one round of MVActiveChecker", e);
}
}
@VisibleForTesting
public void runForTest(boolean clearGrace) {
if (clearGrace) {
clearGracePeriod();
}
process();
}
@VisibleForTesting
private void clearGracePeriod() {
MV_ACTIVE_INFO.clear();
}
private void process() {
Collection<Database> dbs = GlobalStateMgr.getCurrentState().getIdToDb().values();
for (Database db : CollectionUtils.emptyIfNull(dbs)) {
for (Table table : CollectionUtils.emptyIfNull(db.getTables())) {
if (table.isMaterializedView()) {
MaterializedView mv = (MaterializedView) table;
if (!mv.isActive()) {
tryToActivate(mv, true);
}
}
}
}
}
public static void tryToActivate(MaterializedView mv) {
tryToActivate(mv, false);
}
/**
* @param mv
* @param checkGracePeriod whether check the grace period, usually background active would check it, but foreground
* job doesn't
*/
public static class MvActiveInfo {
public static final long MAX_BACKOFF_MINUTES = 60;
private static final long BACKOFF_BASE = 2;
private static final long MAX_BACKOFF_TIMES = (long) (Math.log(MAX_BACKOFF_MINUTES) / Math.log(BACKOFF_BASE));
private LocalDateTime nextActive;
private int failureTimes = 0;
public static MvActiveInfo firstFailure() {
MvActiveInfo info = new MvActiveInfo();
info.next();
return info;
}
/**
* If in grace period, it should not activate the mv
*/
public boolean isInGracePeriod() {
LocalDateTime now = LocalDateTime.now(TimeUtils.getSystemTimeZone().toZoneId());
return now.isBefore(nextActive);
}
public LocalDateTime getNextActive() {
return nextActive;
}
public void next() {
LocalDateTime lastActive = LocalDateTime.now(TimeUtils.getSystemTimeZone().toZoneId());
this.failureTimes++;
this.nextActive = lastActive.plus(failureBackoff(failureTimes));
}
private Duration failureBackoff(int failureTimes) {
if (failureTimes >= MAX_BACKOFF_TIMES) {
return Duration.ofMinutes(MAX_BACKOFF_MINUTES);
}
long expBackoff = (long) Math.pow(BACKOFF_BASE, failureTimes);
return Duration.ofMinutes(expBackoff);
}
}
} |
Services are more @Sanne's area of expertise, but it seems to make sense. I'm surprised that the failure only happens for native queries, if this is what was missing. | void services(BuildProducer<ServiceProviderBuildItem> producer) {
producer.produce(
new ServiceProviderBuildItem(org.hibernate.service.spi.SessionFactoryServiceContributor.class.getName(),
org.hibernate.reactive.service.internal.ReactiveSessionFactoryServiceContributor.class.getName()));
} | org.hibernate.reactive.service.internal.ReactiveSessionFactoryServiceContributor.class.getName())); | void services(BuildProducer<ServiceProviderBuildItem> producer) {
producer.produce(
new ServiceProviderBuildItem(org.hibernate.service.spi.SessionFactoryServiceContributor.class.getName(),
org.hibernate.reactive.service.internal.ReactiveSessionFactoryServiceContributor.class.getName()));
} | class HibernateReactiveProcessor {
private static final String HIBERNATE_REACTIVE = "Hibernate Reactive";
private static final Logger LOG = Logger.getLogger(HibernateReactiveProcessor.class);
static final String[] REFLECTIVE_CONSTRUCTORS_NEEDED = {
"org.hibernate.reactive.persister.entity.impl.ReactiveSingleTableEntityPersister",
"org.hibernate.reactive.persister.entity.impl.ReactiveJoinedSubclassEntityPersister",
"org.hibernate.reactive.persister.entity.impl.ReactiveUnionSubclassEntityPersister",
"org.hibernate.reactive.persister.collection.impl.ReactiveOneToManyPersister",
"org.hibernate.reactive.persister.collection.impl.ReactiveBasicCollectionPersister",
};
@BuildStep
void registerBeans(BuildProducer<AdditionalBeanBuildItem> additionalBeans, CombinedIndexBuildItem combinedIndex,
List<PersistenceUnitDescriptorBuildItem> descriptors,
JpaModelBuildItem jpaModel) {
if (descriptors.size() == 1) {
additionalBeans.produce(new AdditionalBeanBuildItem(ReactiveSessionFactoryProducer.class));
} else {
LOG.warnf(
"Skipping registration of %s bean because exactly one persistence unit is required for their registration",
ReactiveSessionFactoryProducer.class.getSimpleName());
}
}
@BuildStep
void reflections(BuildProducer<ReflectiveClassBuildItem> reflectiveClass) {
reflectiveClass.produce(new ReflectiveClassBuildItem(false, false, REFLECTIVE_CONSTRUCTORS_NEEDED));
}
@BuildStep
@BuildStep
@Record(STATIC_INIT)
public void build(RecorderContext recorderContext,
HibernateReactiveRecorder recorder,
JpaModelBuildItem jpaModel) {
final boolean enableRx = hasEntities(jpaModel);
recorder.callHibernateReactiveFeatureInit(enableRx);
}
@BuildStep
public void buildReactivePersistenceUnit(
HibernateOrmConfig hibernateOrmConfig, CombinedIndexBuildItem index,
DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig,
List<PersistenceXmlDescriptorBuildItem> persistenceXmlDescriptors,
ApplicationArchivesBuildItem applicationArchivesBuildItem,
LaunchModeBuildItem launchMode,
JpaModelBuildItem jpaModel,
BuildProducer<SystemPropertyBuildItem> systemProperties,
BuildProducer<NativeImageResourceBuildItem> nativeImageResources,
BuildProducer<HotDeploymentWatchedFileBuildItem> hotDeploymentWatchedFiles,
BuildProducer<PersistenceUnitDescriptorBuildItem> persistenceUnitDescriptors,
List<DefaultDataSourceDbKindBuildItem> defaultDataSourceDbKindBuildItems,
CurateOutcomeBuildItem curateOutcomeBuildItem,
List<DatabaseKindDialectBuildItem> dbKindDialectBuildItems) {
final boolean enableHR = hasEntities(jpaModel);
if (!enableHR) {
LOG.warn("Hibernate Reactive is disabled because no JPA entities were found");
return;
}
for (PersistenceXmlDescriptorBuildItem persistenceXmlDescriptorBuildItem : persistenceXmlDescriptors) {
String provider = persistenceXmlDescriptorBuildItem.getDescriptor().getProviderClassName();
if (provider == null ||
provider.equals(FastBootHibernateReactivePersistenceProvider.class.getCanonicalName()) ||
provider.equals(FastBootHibernateReactivePersistenceProvider.IMPLEMENTATION_NAME)) {
throw new ConfigurationException(
"Cannot use persistence.xml with Hibernate Reactive in Quarkus. Must use application.properties instead.");
}
}
Optional<String> dbKindOptional = DefaultDataSourceDbKindBuildItem.resolve(
dataSourcesBuildTimeConfig.defaultDataSource.dbKind,
defaultDataSourceDbKindBuildItems,
dataSourcesBuildTimeConfig.defaultDataSource.devservices.enabled
.orElse(dataSourcesBuildTimeConfig.namedDataSources.isEmpty()),
curateOutcomeBuildItem);
if (dbKindOptional.isPresent()) {
final String dbKind = dbKindOptional.get();
HibernateOrmConfigPersistenceUnit persistenceUnitConfig = hibernateOrmConfig.defaultPersistenceUnit;
ParsedPersistenceXmlDescriptor reactivePU = generateReactivePersistenceUnit(
hibernateOrmConfig, index, persistenceUnitConfig, jpaModel,
dbKind, applicationArchivesBuildItem, launchMode.getLaunchMode(),
systemProperties, nativeImageResources, hotDeploymentWatchedFiles, dbKindDialectBuildItems);
persistenceUnitDescriptors.produce(new PersistenceUnitDescriptorBuildItem(reactivePU,
PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME,
new RecordedConfig(Optional.of(DataSourceUtil.DEFAULT_DATASOURCE_NAME),
dbKindOptional, Optional.empty(),
io.quarkus.hibernate.orm.runtime.migration.MultiTenancyStrategy.NONE,
hibernateOrmConfig.database.ormCompatibilityVersion,
persistenceUnitConfig.unsupportedProperties),
null,
jpaModel.getXmlMappings(reactivePU.getName()),
true, false));
}
}
@BuildStep
void waitForVertxPool(List<VertxPoolBuildItem> vertxPool,
List<PersistenceUnitDescriptorBuildItem> persistenceUnitDescriptorBuildItems,
BuildProducer<HibernateOrmIntegrationRuntimeConfiguredBuildItem> runtimeConfigured) {
for (PersistenceUnitDescriptorBuildItem puDescriptor : persistenceUnitDescriptorBuildItems) {
runtimeConfigured.produce(new HibernateOrmIntegrationRuntimeConfiguredBuildItem(HIBERNATE_REACTIVE,
puDescriptor.getPersistenceUnitName()));
}
}
@BuildStep
@Record(RUNTIME_INIT)
PersistenceProviderSetUpBuildItem setUpPersistenceProviderAndWaitForVertxPool(HibernateReactiveRecorder recorder,
HibernateOrmRuntimeConfig hibernateOrmRuntimeConfig,
List<HibernateOrmIntegrationRuntimeConfiguredBuildItem> integrationBuildItems,
BuildProducer<RecorderBeanInitializedBuildItem> orderEnforcer) {
recorder.initializePersistenceProvider(hibernateOrmRuntimeConfig,
HibernateOrmIntegrationRuntimeConfiguredBuildItem.collectDescriptors(integrationBuildItems));
return new PersistenceProviderSetUpBuildItem();
}
/**
* This is mostly copied from
* io.quarkus.hibernate.orm.deployment.HibernateOrmProcessor
* Key differences are:
* - Always produces a persistence unit descriptor, since we assume there always 1 reactive persistence unit
* - Any JDBC-only configuration settings are removed
* - If we ever add any Reactive-only config settings, they can be set here
*/
private static ParsedPersistenceXmlDescriptor generateReactivePersistenceUnit(
HibernateOrmConfig hibernateOrmConfig, CombinedIndexBuildItem index,
HibernateOrmConfigPersistenceUnit persistenceUnitConfig,
JpaModelBuildItem jpaModel,
String dbKind,
ApplicationArchivesBuildItem applicationArchivesBuildItem,
LaunchMode launchMode,
BuildProducer<SystemPropertyBuildItem> systemProperties,
BuildProducer<NativeImageResourceBuildItem> nativeImageResources,
BuildProducer<HotDeploymentWatchedFileBuildItem> hotDeploymentWatchedFiles,
List<DatabaseKindDialectBuildItem> dbKindDialectBuildItems) {
String persistenceUnitConfigName = PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME;
Optional<String> explicitDialect = persistenceUnitConfig.dialect.dialect;
String dialect;
if (explicitDialect.isPresent()) {
dialect = explicitDialect.get();
} else {
dialect = Dialects.guessDialect(persistenceUnitConfigName, dbKind, dbKindDialectBuildItems);
}
ParsedPersistenceXmlDescriptor desc = new ParsedPersistenceXmlDescriptor(null);
desc.setName(HibernateReactive.DEFAULT_REACTIVE_PERSISTENCE_UNIT_NAME);
desc.setTransactionType(PersistenceUnitTransactionType.RESOURCE_LOCAL);
desc.getProperties().setProperty(AvailableSettings.DIALECT, dialect);
desc.setExcludeUnlistedClasses(true);
Map<String, Set<String>> modelClassesAndPackagesPerPersistencesUnits = HibernateOrmProcessor
.getModelClassesAndPackagesPerPersistenceUnits(hibernateOrmConfig, jpaModel, index.getIndex(), true);
Set<String> nonDefaultPUWithModelClassesOrPackages = modelClassesAndPackagesPerPersistencesUnits.entrySet().stream()
.filter(e -> !PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME.equals(e.getKey()) && !e.getValue().isEmpty())
.map(Map.Entry::getKey)
.collect(Collectors.toSet());
if (!nonDefaultPUWithModelClassesOrPackages.isEmpty()) {
LOG.warnf("Entities are affected to non-default Hibernate Reactive persistence units %s."
+ " Since Hibernate Reactive only works with the default persistence unit, those entities will be ignored.",
nonDefaultPUWithModelClassesOrPackages);
}
Set<String> modelClassesAndPackages = modelClassesAndPackagesPerPersistencesUnits
.getOrDefault(PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME, Collections.emptySet());
if (modelClassesAndPackages.isEmpty()) {
LOG.warnf("Could not find any entities affected to the Hibernate Reactive persistence unit.");
} else {
desc.addClasses(new ArrayList<>(modelClassesAndPackages));
}
if (persistenceUnitConfig.dialect.storageEngine.isPresent()) {
systemProperties.produce(new SystemPropertyBuildItem(AvailableSettings.STORAGE_ENGINE,
persistenceUnitConfig.dialect.storageEngine.get()));
}
persistenceUnitConfig.physicalNamingStrategy.ifPresent(
namingStrategy -> desc.getProperties()
.setProperty(AvailableSettings.PHYSICAL_NAMING_STRATEGY, namingStrategy));
persistenceUnitConfig.implicitNamingStrategy.ifPresent(
namingStrategy -> desc.getProperties()
.setProperty(AvailableSettings.IMPLICIT_NAMING_STRATEGY, namingStrategy));
if (persistenceUnitConfig.mapping.timeZoneDefaultStorage.isPresent()) {
desc.getProperties().setProperty(AvailableSettings.TIMEZONE_DEFAULT_STORAGE,
persistenceUnitConfig.mapping.timeZoneDefaultStorage.get().name());
}
desc.getProperties().setProperty(AvailableSettings.PREFERRED_POOLED_OPTIMIZER,
persistenceUnitConfig.mapping.idOptimizerDefault
.orElse(HibernateOrmConfigPersistenceUnit.IdOptimizerType.POOLED_LO).configName);
desc.getProperties().setProperty(AvailableSettings.HBM2DDL_CHARSET_NAME,
persistenceUnitConfig.database.charset.name());
if (persistenceUnitConfig.identifierQuotingStrategy == IdentifierQuotingStrategy.ALL
|| persistenceUnitConfig.identifierQuotingStrategy == IdentifierQuotingStrategy.ALL_EXCEPT_COLUMN_DEFINITIONS
|| persistenceUnitConfig.database.globallyQuotedIdentifiers) {
desc.getProperties().setProperty(AvailableSettings.GLOBALLY_QUOTED_IDENTIFIERS, "true");
}
if (persistenceUnitConfig.identifierQuotingStrategy == IdentifierQuotingStrategy.ALL_EXCEPT_COLUMN_DEFINITIONS) {
desc.getProperties().setProperty(AvailableSettings.GLOBALLY_QUOTED_IDENTIFIERS_SKIP_COLUMN_DEFINITIONS, "true");
} else if (persistenceUnitConfig.identifierQuotingStrategy == IdentifierQuotingStrategy.ONLY_KEYWORDS) {
desc.getProperties().setProperty(AvailableSettings.KEYWORD_AUTO_QUOTING_ENABLED, "true");
}
int batchSize = firstPresent(persistenceUnitConfig.fetch.batchSize, persistenceUnitConfig.batchFetchSize)
.orElse(-1);
if (batchSize > 0) {
desc.getProperties().setProperty(AvailableSettings.DEFAULT_BATCH_FETCH_SIZE,
Integer.toString(batchSize));
desc.getProperties().setProperty(AvailableSettings.BATCH_FETCH_STYLE, BatchFetchStyle.PADDED.toString());
}
if (persistenceUnitConfig.fetch.maxDepth.isPresent()) {
setMaxFetchDepth(desc, persistenceUnitConfig.fetch.maxDepth);
} else if (persistenceUnitConfig.maxFetchDepth.isPresent()) {
setMaxFetchDepth(desc, persistenceUnitConfig.maxFetchDepth);
}
desc.getProperties().setProperty(AvailableSettings.QUERY_PLAN_CACHE_MAX_SIZE, Integer.toString(
persistenceUnitConfig.query.queryPlanCacheMaxSize));
desc.getProperties().setProperty(AvailableSettings.DEFAULT_NULL_ORDERING,
persistenceUnitConfig.query.defaultNullOrdering.name().toLowerCase());
desc.getProperties().setProperty(AvailableSettings.IN_CLAUSE_PARAMETER_PADDING,
String.valueOf(persistenceUnitConfig.query.inClauseParameterPadding));
persistenceUnitConfig.jdbc.timezone.ifPresent(
timezone -> desc.getProperties().setProperty(AvailableSettings.JDBC_TIME_ZONE, timezone));
persistenceUnitConfig.jdbc.statementFetchSize.ifPresent(
fetchSize -> desc.getProperties().setProperty(AvailableSettings.STATEMENT_FETCH_SIZE,
String.valueOf(fetchSize)));
persistenceUnitConfig.jdbc.statementBatchSize.ifPresent(
statementBatchSize -> desc.getProperties().setProperty(AvailableSettings.STATEMENT_BATCH_SIZE,
String.valueOf(statementBatchSize)));
if (hibernateOrmConfig.metricsEnabled
|| (hibernateOrmConfig.statistics.isPresent() && hibernateOrmConfig.statistics.get())) {
desc.getProperties().setProperty(AvailableSettings.GENERATE_STATISTICS, "true");
}
List<String> importFiles = getSqlLoadScript(persistenceUnitConfig.sqlLoadScript, launchMode);
if (!importFiles.isEmpty()) {
for (String importFile : importFiles) {
Path loadScriptPath = applicationArchivesBuildItem.getRootArchive().getChildPath(importFile);
if (loadScriptPath != null && !Files.isDirectory(loadScriptPath)) {
nativeImageResources.produce(new NativeImageResourceBuildItem(importFile));
hotDeploymentWatchedFiles.produce(new HotDeploymentWatchedFileBuildItem(importFile));
} else if (persistenceUnitConfig.sqlLoadScript.isPresent()) {
String propertyName = HibernateOrmRuntimeConfig.puPropertyKey(persistenceUnitConfigName, "sql-load-script");
throw new ConfigurationException(
"Unable to find file referenced in '"
+ propertyName + "="
+ String.join(",", persistenceUnitConfig.sqlLoadScript.get())
+ "'. Remove property or add file to your path.",
Collections.singleton(propertyName));
}
}
if (persistenceUnitConfig.sqlLoadScript.isPresent()) {
desc.getProperties().setProperty(AvailableSettings.HBM2DDL_IMPORT_FILES, String.join(",", importFiles));
}
} else {
desc.getProperties().setProperty(AvailableSettings.HBM2DDL_IMPORT_FILES, "");
}
if (persistenceUnitConfig.secondLevelCachingEnabled) {
Properties p = desc.getProperties();
p.putIfAbsent(USE_DIRECT_REFERENCE_CACHE_ENTRIES, Boolean.TRUE);
p.putIfAbsent(USE_SECOND_LEVEL_CACHE, Boolean.TRUE);
p.putIfAbsent(USE_QUERY_CACHE, Boolean.TRUE);
p.putIfAbsent(JAKARTA_SHARED_CACHE_MODE, SharedCacheMode.ENABLE_SELECTIVE);
Map<String, String> cacheConfigEntries = HibernateConfigUtil.getCacheConfigEntries(persistenceUnitConfig);
for (Entry<String, String> entry : cacheConfigEntries.entrySet()) {
desc.getProperties().setProperty(entry.getKey(), entry.getValue());
}
} else {
Properties p = desc.getProperties();
p.put(USE_DIRECT_REFERENCE_CACHE_ENTRIES, Boolean.FALSE);
p.put(USE_SECOND_LEVEL_CACHE, Boolean.FALSE);
p.put(USE_QUERY_CACHE, Boolean.FALSE);
p.put(JAKARTA_SHARED_CACHE_MODE, SharedCacheMode.NONE);
}
return desc;
}
private static void setMaxFetchDepth(ParsedPersistenceXmlDescriptor descriptor, OptionalInt maxFetchDepth) {
descriptor.getProperties().setProperty(AvailableSettings.MAX_FETCH_DEPTH, String.valueOf(maxFetchDepth.getAsInt()));
}
private static List<String> getSqlLoadScript(Optional<List<String>> sqlLoadScript, LaunchMode launchMode) {
if (sqlLoadScript.isPresent()) {
return sqlLoadScript.get().stream()
.filter(s -> !HibernateOrmProcessor.NO_SQL_LOAD_SCRIPT_FILE.equalsIgnoreCase(s))
.collect(Collectors.toList());
} else if (launchMode == LaunchMode.NORMAL) {
return Collections.emptyList();
} else {
return List.of("import.sql");
}
}
private boolean hasEntities(JpaModelBuildItem jpaModel) {
return !jpaModel.getEntityClassNames().isEmpty();
}
} | class HibernateReactiveProcessor {
private static final String HIBERNATE_REACTIVE = "Hibernate Reactive";
private static final Logger LOG = Logger.getLogger(HibernateReactiveProcessor.class);
static final String[] REFLECTIVE_CONSTRUCTORS_NEEDED = {
"org.hibernate.reactive.persister.entity.impl.ReactiveSingleTableEntityPersister",
"org.hibernate.reactive.persister.entity.impl.ReactiveJoinedSubclassEntityPersister",
"org.hibernate.reactive.persister.entity.impl.ReactiveUnionSubclassEntityPersister",
"org.hibernate.reactive.persister.collection.impl.ReactiveOneToManyPersister",
"org.hibernate.reactive.persister.collection.impl.ReactiveBasicCollectionPersister",
};
@BuildStep
void registerBeans(BuildProducer<AdditionalBeanBuildItem> additionalBeans, CombinedIndexBuildItem combinedIndex,
List<PersistenceUnitDescriptorBuildItem> descriptors,
JpaModelBuildItem jpaModel) {
if (descriptors.size() == 1) {
additionalBeans.produce(new AdditionalBeanBuildItem(ReactiveSessionFactoryProducer.class));
} else {
LOG.warnf(
"Skipping registration of %s bean because exactly one persistence unit is required for their registration",
ReactiveSessionFactoryProducer.class.getSimpleName());
}
}
@BuildStep
void reflections(BuildProducer<ReflectiveClassBuildItem> reflectiveClass) {
reflectiveClass.produce(new ReflectiveClassBuildItem(false, false, REFLECTIVE_CONSTRUCTORS_NEEDED));
}
@BuildStep
@BuildStep
@Record(STATIC_INIT)
public void build(RecorderContext recorderContext,
HibernateReactiveRecorder recorder,
JpaModelBuildItem jpaModel) {
final boolean enableRx = hasEntities(jpaModel);
recorder.callHibernateReactiveFeatureInit(enableRx);
}
@BuildStep
public void buildReactivePersistenceUnit(
HibernateOrmConfig hibernateOrmConfig, CombinedIndexBuildItem index,
DataSourcesBuildTimeConfig dataSourcesBuildTimeConfig,
List<PersistenceXmlDescriptorBuildItem> persistenceXmlDescriptors,
ApplicationArchivesBuildItem applicationArchivesBuildItem,
LaunchModeBuildItem launchMode,
JpaModelBuildItem jpaModel,
BuildProducer<SystemPropertyBuildItem> systemProperties,
BuildProducer<NativeImageResourceBuildItem> nativeImageResources,
BuildProducer<HotDeploymentWatchedFileBuildItem> hotDeploymentWatchedFiles,
BuildProducer<PersistenceUnitDescriptorBuildItem> persistenceUnitDescriptors,
List<DefaultDataSourceDbKindBuildItem> defaultDataSourceDbKindBuildItems,
CurateOutcomeBuildItem curateOutcomeBuildItem,
List<DatabaseKindDialectBuildItem> dbKindDialectBuildItems) {
final boolean enableHR = hasEntities(jpaModel);
if (!enableHR) {
LOG.warn("Hibernate Reactive is disabled because no JPA entities were found");
return;
}
for (PersistenceXmlDescriptorBuildItem persistenceXmlDescriptorBuildItem : persistenceXmlDescriptors) {
String provider = persistenceXmlDescriptorBuildItem.getDescriptor().getProviderClassName();
if (provider == null ||
provider.equals(FastBootHibernateReactivePersistenceProvider.class.getCanonicalName()) ||
provider.equals(FastBootHibernateReactivePersistenceProvider.IMPLEMENTATION_NAME)) {
throw new ConfigurationException(
"Cannot use persistence.xml with Hibernate Reactive in Quarkus. Must use application.properties instead.");
}
}
Optional<String> dbKindOptional = DefaultDataSourceDbKindBuildItem.resolve(
dataSourcesBuildTimeConfig.defaultDataSource.dbKind,
defaultDataSourceDbKindBuildItems,
dataSourcesBuildTimeConfig.defaultDataSource.devservices.enabled
.orElse(dataSourcesBuildTimeConfig.namedDataSources.isEmpty()),
curateOutcomeBuildItem);
if (dbKindOptional.isPresent()) {
final String dbKind = dbKindOptional.get();
HibernateOrmConfigPersistenceUnit persistenceUnitConfig = hibernateOrmConfig.defaultPersistenceUnit;
ParsedPersistenceXmlDescriptor reactivePU = generateReactivePersistenceUnit(
hibernateOrmConfig, index, persistenceUnitConfig, jpaModel,
dbKind, applicationArchivesBuildItem, launchMode.getLaunchMode(),
systemProperties, nativeImageResources, hotDeploymentWatchedFiles, dbKindDialectBuildItems);
persistenceUnitDescriptors.produce(new PersistenceUnitDescriptorBuildItem(reactivePU,
PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME,
new RecordedConfig(Optional.of(DataSourceUtil.DEFAULT_DATASOURCE_NAME),
dbKindOptional, Optional.empty(),
io.quarkus.hibernate.orm.runtime.migration.MultiTenancyStrategy.NONE,
hibernateOrmConfig.database.ormCompatibilityVersion,
persistenceUnitConfig.unsupportedProperties),
null,
jpaModel.getXmlMappings(reactivePU.getName()),
true, false));
}
}
@BuildStep
void waitForVertxPool(List<VertxPoolBuildItem> vertxPool,
List<PersistenceUnitDescriptorBuildItem> persistenceUnitDescriptorBuildItems,
BuildProducer<HibernateOrmIntegrationRuntimeConfiguredBuildItem> runtimeConfigured) {
for (PersistenceUnitDescriptorBuildItem puDescriptor : persistenceUnitDescriptorBuildItems) {
runtimeConfigured.produce(new HibernateOrmIntegrationRuntimeConfiguredBuildItem(HIBERNATE_REACTIVE,
puDescriptor.getPersistenceUnitName()));
}
}
@BuildStep
@Record(RUNTIME_INIT)
PersistenceProviderSetUpBuildItem setUpPersistenceProviderAndWaitForVertxPool(HibernateReactiveRecorder recorder,
HibernateOrmRuntimeConfig hibernateOrmRuntimeConfig,
List<HibernateOrmIntegrationRuntimeConfiguredBuildItem> integrationBuildItems,
BuildProducer<RecorderBeanInitializedBuildItem> orderEnforcer) {
recorder.initializePersistenceProvider(hibernateOrmRuntimeConfig,
HibernateOrmIntegrationRuntimeConfiguredBuildItem.collectDescriptors(integrationBuildItems));
return new PersistenceProviderSetUpBuildItem();
}
/**
* This is mostly copied from
* io.quarkus.hibernate.orm.deployment.HibernateOrmProcessor
* Key differences are:
* - Always produces a persistence unit descriptor, since we assume there always 1 reactive persistence unit
* - Any JDBC-only configuration settings are removed
* - If we ever add any Reactive-only config settings, they can be set here
*/
private static ParsedPersistenceXmlDescriptor generateReactivePersistenceUnit(
HibernateOrmConfig hibernateOrmConfig, CombinedIndexBuildItem index,
HibernateOrmConfigPersistenceUnit persistenceUnitConfig,
JpaModelBuildItem jpaModel,
String dbKind,
ApplicationArchivesBuildItem applicationArchivesBuildItem,
LaunchMode launchMode,
BuildProducer<SystemPropertyBuildItem> systemProperties,
BuildProducer<NativeImageResourceBuildItem> nativeImageResources,
BuildProducer<HotDeploymentWatchedFileBuildItem> hotDeploymentWatchedFiles,
List<DatabaseKindDialectBuildItem> dbKindDialectBuildItems) {
String persistenceUnitConfigName = PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME;
Optional<String> explicitDialect = persistenceUnitConfig.dialect.dialect;
String dialect;
if (explicitDialect.isPresent()) {
dialect = explicitDialect.get();
} else {
dialect = Dialects.guessDialect(persistenceUnitConfigName, dbKind, dbKindDialectBuildItems);
}
ParsedPersistenceXmlDescriptor desc = new ParsedPersistenceXmlDescriptor(null);
desc.setName(HibernateReactive.DEFAULT_REACTIVE_PERSISTENCE_UNIT_NAME);
desc.setTransactionType(PersistenceUnitTransactionType.RESOURCE_LOCAL);
desc.getProperties().setProperty(AvailableSettings.DIALECT, dialect);
desc.setExcludeUnlistedClasses(true);
Map<String, Set<String>> modelClassesAndPackagesPerPersistencesUnits = HibernateOrmProcessor
.getModelClassesAndPackagesPerPersistenceUnits(hibernateOrmConfig, jpaModel, index.getIndex(), true);
Set<String> nonDefaultPUWithModelClassesOrPackages = modelClassesAndPackagesPerPersistencesUnits.entrySet().stream()
.filter(e -> !PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME.equals(e.getKey()) && !e.getValue().isEmpty())
.map(Map.Entry::getKey)
.collect(Collectors.toSet());
if (!nonDefaultPUWithModelClassesOrPackages.isEmpty()) {
LOG.warnf("Entities are affected to non-default Hibernate Reactive persistence units %s."
+ " Since Hibernate Reactive only works with the default persistence unit, those entities will be ignored.",
nonDefaultPUWithModelClassesOrPackages);
}
Set<String> modelClassesAndPackages = modelClassesAndPackagesPerPersistencesUnits
.getOrDefault(PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME, Collections.emptySet());
if (modelClassesAndPackages.isEmpty()) {
LOG.warnf("Could not find any entities affected to the Hibernate Reactive persistence unit.");
} else {
desc.addClasses(new ArrayList<>(modelClassesAndPackages));
}
if (persistenceUnitConfig.dialect.storageEngine.isPresent()) {
systemProperties.produce(new SystemPropertyBuildItem(AvailableSettings.STORAGE_ENGINE,
persistenceUnitConfig.dialect.storageEngine.get()));
}
persistenceUnitConfig.physicalNamingStrategy.ifPresent(
namingStrategy -> desc.getProperties()
.setProperty(AvailableSettings.PHYSICAL_NAMING_STRATEGY, namingStrategy));
persistenceUnitConfig.implicitNamingStrategy.ifPresent(
namingStrategy -> desc.getProperties()
.setProperty(AvailableSettings.IMPLICIT_NAMING_STRATEGY, namingStrategy));
if (persistenceUnitConfig.mapping.timeZoneDefaultStorage.isPresent()) {
desc.getProperties().setProperty(AvailableSettings.TIMEZONE_DEFAULT_STORAGE,
persistenceUnitConfig.mapping.timeZoneDefaultStorage.get().name());
}
desc.getProperties().setProperty(AvailableSettings.PREFERRED_POOLED_OPTIMIZER,
persistenceUnitConfig.mapping.idOptimizerDefault
.orElse(HibernateOrmConfigPersistenceUnit.IdOptimizerType.POOLED_LO).configName);
desc.getProperties().setProperty(AvailableSettings.HBM2DDL_CHARSET_NAME,
persistenceUnitConfig.database.charset.name());
if (persistenceUnitConfig.identifierQuotingStrategy == IdentifierQuotingStrategy.ALL
|| persistenceUnitConfig.identifierQuotingStrategy == IdentifierQuotingStrategy.ALL_EXCEPT_COLUMN_DEFINITIONS
|| persistenceUnitConfig.database.globallyQuotedIdentifiers) {
desc.getProperties().setProperty(AvailableSettings.GLOBALLY_QUOTED_IDENTIFIERS, "true");
}
if (persistenceUnitConfig.identifierQuotingStrategy == IdentifierQuotingStrategy.ALL_EXCEPT_COLUMN_DEFINITIONS) {
desc.getProperties().setProperty(AvailableSettings.GLOBALLY_QUOTED_IDENTIFIERS_SKIP_COLUMN_DEFINITIONS, "true");
} else if (persistenceUnitConfig.identifierQuotingStrategy == IdentifierQuotingStrategy.ONLY_KEYWORDS) {
desc.getProperties().setProperty(AvailableSettings.KEYWORD_AUTO_QUOTING_ENABLED, "true");
}
int batchSize = firstPresent(persistenceUnitConfig.fetch.batchSize, persistenceUnitConfig.batchFetchSize)
.orElse(-1);
if (batchSize > 0) {
desc.getProperties().setProperty(AvailableSettings.DEFAULT_BATCH_FETCH_SIZE,
Integer.toString(batchSize));
desc.getProperties().setProperty(AvailableSettings.BATCH_FETCH_STYLE, BatchFetchStyle.PADDED.toString());
}
if (persistenceUnitConfig.fetch.maxDepth.isPresent()) {
setMaxFetchDepth(desc, persistenceUnitConfig.fetch.maxDepth);
} else if (persistenceUnitConfig.maxFetchDepth.isPresent()) {
setMaxFetchDepth(desc, persistenceUnitConfig.maxFetchDepth);
}
desc.getProperties().setProperty(AvailableSettings.QUERY_PLAN_CACHE_MAX_SIZE, Integer.toString(
persistenceUnitConfig.query.queryPlanCacheMaxSize));
desc.getProperties().setProperty(AvailableSettings.DEFAULT_NULL_ORDERING,
persistenceUnitConfig.query.defaultNullOrdering.name().toLowerCase());
desc.getProperties().setProperty(AvailableSettings.IN_CLAUSE_PARAMETER_PADDING,
String.valueOf(persistenceUnitConfig.query.inClauseParameterPadding));
persistenceUnitConfig.jdbc.timezone.ifPresent(
timezone -> desc.getProperties().setProperty(AvailableSettings.JDBC_TIME_ZONE, timezone));
persistenceUnitConfig.jdbc.statementFetchSize.ifPresent(
fetchSize -> desc.getProperties().setProperty(AvailableSettings.STATEMENT_FETCH_SIZE,
String.valueOf(fetchSize)));
persistenceUnitConfig.jdbc.statementBatchSize.ifPresent(
statementBatchSize -> desc.getProperties().setProperty(AvailableSettings.STATEMENT_BATCH_SIZE,
String.valueOf(statementBatchSize)));
if (hibernateOrmConfig.metricsEnabled
|| (hibernateOrmConfig.statistics.isPresent() && hibernateOrmConfig.statistics.get())) {
desc.getProperties().setProperty(AvailableSettings.GENERATE_STATISTICS, "true");
}
List<String> importFiles = getSqlLoadScript(persistenceUnitConfig.sqlLoadScript, launchMode);
if (!importFiles.isEmpty()) {
for (String importFile : importFiles) {
Path loadScriptPath = applicationArchivesBuildItem.getRootArchive().getChildPath(importFile);
if (loadScriptPath != null && !Files.isDirectory(loadScriptPath)) {
nativeImageResources.produce(new NativeImageResourceBuildItem(importFile));
hotDeploymentWatchedFiles.produce(new HotDeploymentWatchedFileBuildItem(importFile));
} else if (persistenceUnitConfig.sqlLoadScript.isPresent()) {
String propertyName = HibernateOrmRuntimeConfig.puPropertyKey(persistenceUnitConfigName, "sql-load-script");
throw new ConfigurationException(
"Unable to find file referenced in '"
+ propertyName + "="
+ String.join(",", persistenceUnitConfig.sqlLoadScript.get())
+ "'. Remove property or add file to your path.",
Collections.singleton(propertyName));
}
}
if (persistenceUnitConfig.sqlLoadScript.isPresent()) {
desc.getProperties().setProperty(AvailableSettings.HBM2DDL_IMPORT_FILES, String.join(",", importFiles));
}
} else {
desc.getProperties().setProperty(AvailableSettings.HBM2DDL_IMPORT_FILES, "");
}
if (persistenceUnitConfig.secondLevelCachingEnabled) {
Properties p = desc.getProperties();
p.putIfAbsent(USE_DIRECT_REFERENCE_CACHE_ENTRIES, Boolean.TRUE);
p.putIfAbsent(USE_SECOND_LEVEL_CACHE, Boolean.TRUE);
p.putIfAbsent(USE_QUERY_CACHE, Boolean.TRUE);
p.putIfAbsent(JAKARTA_SHARED_CACHE_MODE, SharedCacheMode.ENABLE_SELECTIVE);
Map<String, String> cacheConfigEntries = HibernateConfigUtil.getCacheConfigEntries(persistenceUnitConfig);
for (Entry<String, String> entry : cacheConfigEntries.entrySet()) {
desc.getProperties().setProperty(entry.getKey(), entry.getValue());
}
} else {
Properties p = desc.getProperties();
p.put(USE_DIRECT_REFERENCE_CACHE_ENTRIES, Boolean.FALSE);
p.put(USE_SECOND_LEVEL_CACHE, Boolean.FALSE);
p.put(USE_QUERY_CACHE, Boolean.FALSE);
p.put(JAKARTA_SHARED_CACHE_MODE, SharedCacheMode.NONE);
}
return desc;
}
private static void setMaxFetchDepth(ParsedPersistenceXmlDescriptor descriptor, OptionalInt maxFetchDepth) {
descriptor.getProperties().setProperty(AvailableSettings.MAX_FETCH_DEPTH, String.valueOf(maxFetchDepth.getAsInt()));
}
private static List<String> getSqlLoadScript(Optional<List<String>> sqlLoadScript, LaunchMode launchMode) {
if (sqlLoadScript.isPresent()) {
return sqlLoadScript.get().stream()
.filter(s -> !HibernateOrmProcessor.NO_SQL_LOAD_SCRIPT_FILE.equalsIgnoreCase(s))
.collect(Collectors.toList());
} else if (launchMode == LaunchMode.NORMAL) {
return Collections.emptyList();
} else {
return List.of("import.sql");
}
}
private boolean hasEntities(JpaModelBuildItem jpaModel) {
return !jpaModel.getEntityClassNames().isEmpty();
}
} |
How do we get the executor service from ExecutorOptions? As far as I can see, PipelineOptions should be then a reader field, but how do we get the ExecutorService from PipelineOptions? | private void doClose() {
try {
closeAutoscaler();
closeConsumer();
ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor();
executorService.schedule(
() -> {
LOG.debug(
"Closing session and connection after delay {}", source.spec.getCloseTimeout());
checkpointMark.discard();
closeSession();
closeConnection();
},
source.spec.getCloseTimeout(),
TimeUnit.MILLISECONDS);
} catch (Exception e) {
LOG.error("Error closing reader", e);
}
} | ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor(); | private void doClose() {
try {
closeAutoscaler();
closeConsumer();
ScheduledExecutorService executorService =
options.as(ExecutorOptions.class).getScheduledExecutorService();
executorService.schedule(
() -> {
LOG.debug(
"Closing session and connection after delay {}", source.spec.getCloseTimeout());
checkpointMark.discard();
closeSession();
closeConnection();
},
source.spec.getCloseTimeout().getMillis(),
TimeUnit.MILLISECONDS);
} catch (Exception e) {
LOG.error("Error closing reader", e);
}
} | class UnboundedJmsReader<T> extends UnboundedReader<T> {
private UnboundedJmsSource<T> source;
private JmsCheckpointMark checkpointMark;
private Connection connection;
private Session session;
private MessageConsumer consumer;
private AutoScaler autoScaler;
private T currentMessage;
private Instant currentTimestamp;
public UnboundedJmsReader(UnboundedJmsSource<T> source) {
this.source = source;
this.checkpointMark = new JmsCheckpointMark();
this.currentMessage = null;
}
@Override
public boolean start() throws IOException {
Read<T> spec = source.spec;
ConnectionFactory connectionFactory = spec.getConnectionFactory();
try {
Connection connection;
if (spec.getUsername() != null) {
connection = connectionFactory.createConnection(spec.getUsername(), spec.getPassword());
} else {
connection = connectionFactory.createConnection();
}
connection.start();
this.connection = connection;
if (spec.getAutoScaler() == null) {
this.autoScaler = new DefaultAutoscaler();
} else {
this.autoScaler = spec.getAutoScaler();
}
this.autoScaler.start();
} catch (Exception e) {
throw new IOException("Error connecting to JMS", e);
}
try {
this.session = this.connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
} catch (Exception e) {
throw new IOException("Error creating JMS session", e);
}
try {
if (spec.getTopic() != null) {
this.consumer = this.session.createConsumer(this.session.createTopic(spec.getTopic()));
} else {
this.consumer = this.session.createConsumer(this.session.createQueue(spec.getQueue()));
}
} catch (Exception e) {
throw new IOException("Error creating JMS consumer", e);
}
return advance();
}
@Override
public boolean advance() throws IOException {
try {
Message message = this.consumer.receiveNoWait();
if (message == null) {
currentMessage = null;
return false;
}
checkpointMark.add(message);
currentMessage = this.source.spec.getMessageMapper().mapMessage(message);
currentTimestamp = new Instant(message.getJMSTimestamp());
return true;
} catch (Exception e) {
throw new IOException(e);
}
}
@Override
public T getCurrent() throws NoSuchElementException {
if (currentMessage == null) {
throw new NoSuchElementException();
}
return currentMessage;
}
@Override
public Instant getWatermark() {
return checkpointMark.getOldestMessageTimestamp();
}
@Override
public Instant getCurrentTimestamp() {
if (currentMessage == null) {
throw new NoSuchElementException();
}
return currentTimestamp;
}
@Override
public CheckpointMark getCheckpointMark() {
return checkpointMark;
}
@Override
public long getTotalBacklogBytes() {
return this.autoScaler.getTotalBacklogBytes();
}
@Override
public UnboundedSource<T, ?> getCurrentSource() {
return source;
}
@Override
public void close() {
doClose();
}
@SuppressWarnings("FutureReturnValueIgnored")
private void closeConnection() {
try {
if (connection != null) {
connection.stop();
connection.close();
connection = null;
}
} catch (Exception e) {
LOG.error("Error closing connection", e);
}
}
private void closeSession() {
try {
if (session != null) {
session.close();
session = null;
}
} catch (Exception e) {
LOG.error("Error closing session" + e.getMessage(), e);
}
}
private void closeConsumer() {
try {
if (consumer != null) {
consumer.close();
consumer = null;
}
} catch (Exception e) {
LOG.error("Error closing consumer", e);
}
}
private void closeAutoscaler() {
try {
if (autoScaler != null) {
autoScaler.stop();
autoScaler = null;
}
} catch (Exception e) {
LOG.error("Error closing autoscaler", e);
}
}
@Override
protected void finalize() {
doClose();
}
} | class UnboundedJmsReader<T> extends UnboundedReader<T> {
private UnboundedJmsSource<T> source;
private JmsCheckpointMark checkpointMark;
private Connection connection;
private Session session;
private MessageConsumer consumer;
private AutoScaler autoScaler;
private T currentMessage;
private Instant currentTimestamp;
private PipelineOptions options;
public UnboundedJmsReader(UnboundedJmsSource<T> source, PipelineOptions options) {
this.source = source;
this.checkpointMark = new JmsCheckpointMark();
this.currentMessage = null;
this.options = options;
}
@Override
public boolean start() throws IOException {
Read<T> spec = source.spec;
ConnectionFactory connectionFactory = spec.getConnectionFactory();
try {
Connection connection;
if (spec.getUsername() != null) {
connection = connectionFactory.createConnection(spec.getUsername(), spec.getPassword());
} else {
connection = connectionFactory.createConnection();
}
connection.start();
this.connection = connection;
if (spec.getAutoScaler() == null) {
this.autoScaler = new DefaultAutoscaler();
} else {
this.autoScaler = spec.getAutoScaler();
}
this.autoScaler.start();
} catch (Exception e) {
throw new IOException("Error connecting to JMS", e);
}
try {
this.session = this.connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
} catch (Exception e) {
throw new IOException("Error creating JMS session", e);
}
try {
if (spec.getTopic() != null) {
this.consumer = this.session.createConsumer(this.session.createTopic(spec.getTopic()));
} else {
this.consumer = this.session.createConsumer(this.session.createQueue(spec.getQueue()));
}
} catch (Exception e) {
throw new IOException("Error creating JMS consumer", e);
}
return advance();
}
@Override
public boolean advance() throws IOException {
try {
Message message = this.consumer.receiveNoWait();
if (message == null) {
currentMessage = null;
return false;
}
checkpointMark.add(message);
currentMessage = this.source.spec.getMessageMapper().mapMessage(message);
currentTimestamp = new Instant(message.getJMSTimestamp());
return true;
} catch (Exception e) {
throw new IOException(e);
}
}
@Override
public T getCurrent() throws NoSuchElementException {
if (currentMessage == null) {
throw new NoSuchElementException();
}
return currentMessage;
}
@Override
public Instant getWatermark() {
return checkpointMark.getOldestMessageTimestamp();
}
@Override
public Instant getCurrentTimestamp() {
if (currentMessage == null) {
throw new NoSuchElementException();
}
return currentTimestamp;
}
@Override
public CheckpointMark getCheckpointMark() {
return checkpointMark;
}
@Override
public long getTotalBacklogBytes() {
return this.autoScaler.getTotalBacklogBytes();
}
@Override
public UnboundedSource<T, ?> getCurrentSource() {
return source;
}
@Override
public void close() {
doClose();
}
@SuppressWarnings("FutureReturnValueIgnored")
private void closeConnection() {
try {
if (connection != null) {
connection.stop();
connection.close();
connection = null;
}
} catch (Exception e) {
LOG.error("Error closing connection", e);
}
}
private void closeSession() {
try {
if (session != null) {
session.close();
session = null;
}
} catch (Exception e) {
LOG.error("Error closing session" + e.getMessage(), e);
}
}
private void closeConsumer() {
try {
if (consumer != null) {
consumer.close();
consumer = null;
}
} catch (Exception e) {
LOG.error("Error closing consumer", e);
}
}
private void closeAutoscaler() {
try {
if (autoScaler != null) {
autoScaler.stop();
autoScaler = null;
}
} catch (Exception e) {
LOG.error("Error closing autoscaler", e);
}
}
@Override
protected void finalize() {
doClose();
}
} |
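The question in this row's review comment (how to obtain a ScheduledExecutorService from PipelineOptions) is answered in the revised doClose() above through the ExecutorOptions view. A minimal, self-contained sketch of that pattern, with a hypothetical delay value and close action:

```java
// Sketch only: fetch the shared scheduled executor from Beam's PipelineOptions
// via its ExecutorOptions view, as the revised doClose() in this row does.
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.beam.sdk.options.ExecutorOptions;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;

public class ScheduledCloseExample {

  // Schedules an arbitrary close action on the executor owned by the pipeline options.
  public static void scheduleClose(PipelineOptions options, Runnable closeAction, long delayMillis) {
    // Any PipelineOptions instance can be viewed as a registered options interface via as(...).
    ScheduledExecutorService executorService =
        options.as(ExecutorOptions.class).getScheduledExecutorService();
    executorService.schedule(closeAction, delayMillis, TimeUnit.MILLISECONDS);
  }

  public static void main(String[] args) {
    PipelineOptions options = PipelineOptionsFactory.create();
    scheduleClose(options, () -> System.out.println("closing"), 1_000L);
  }
}
```

Reusing the executor provided by the options, instead of creating a new single-thread scheduler inside doClose() as the original code did, appears to be the point of the change in this row.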
It was a deliberate decision not to do it using CleanupTimer. Setting the timer is an implementation detail and should be transparent to the CleanupTimer class. | public void setTimer(TimerData timer) {
if (timer.getTimestamp().isAfter(GlobalWindow.INSTANCE.maxTimestamp())) {
if (timer.getTimerId().equals(StatefulDoFnRunner.TimeInternalsCleanupTimer.GC_TIMER_ID)
|| timer.getTimerId().equals(ExecutableStageDoFnOperator.CleanupTimer.GC_TIMER_ID)) {
return;
} else {
throw new IllegalStateException(
"Timer cannot be set past the maximum timestamp: " + timer);
}
}
try {
LOG.debug(
"Setting timer: {} at {} with output time {}",
timer.getTimerId(),
timer.getTimestamp().getMillis(),
timer.getOutputTimestamp().getMillis());
String contextTimerId = getContextTimerId(timer.getTimerId(), timer.getNamespace());
cancelPendingTimerById(contextTimerId);
registerTimer(timer, contextTimerId);
} catch (Exception e) {
throw new RuntimeException("Failed to set timer", e);
}
} | if (timer.getTimestamp().isAfter(GlobalWindow.INSTANCE.maxTimestamp())) { | public void setTimer(TimerData timer) {
try {
LOG.debug(
"Setting timer: {} at {} with output time {}",
timer.getTimerId(),
timer.getTimestamp().getMillis(),
timer.getOutputTimestamp().getMillis());
String contextTimerId = getContextTimerId(timer.getTimerId(), timer.getNamespace());
cancelPendingTimerById(contextTimerId);
registerTimer(timer, contextTimerId);
} catch (Exception e) {
throw new RuntimeException("Failed to set timer", e);
}
} | class FlinkTimerInternals implements TimerInternals {
private static final String PENDING_TIMERS_STATE_NAME = "pending-timers";
/**
* Pending Timers (=not been fired yet) by context id. The id is generated from the state
* namespace of the timer and the timer's id. Necessary for supporting removal of existing
* timers. In Flink removal of timers can only be done by providing id and time of the timer.
*
* <p>CAUTION: This map is scoped by the current active key. Do not attempt to perform any
* calculations which span across keys.
*/
@VisibleForTesting final MapState<String, TimerData> pendingTimersById;
private FlinkTimerInternals() {
MapStateDescriptor<String, TimerData> pendingTimersByIdStateDescriptor =
new MapStateDescriptor<>(
PENDING_TIMERS_STATE_NAME,
new StringSerializer(),
new CoderTypeSerializer<>(timerCoder));
this.pendingTimersById = getKeyedStateStore().getMapState(pendingTimersByIdStateDescriptor);
populateOutputTimestampQueue();
}
/**
* Processes all pending processing timers. This is intended for use during shutdown. From Flink
* 1.10 on, processing timer execution is stopped when the operator is closed. This leads to
* problems for applications which assume all pending timers will be completed. Although Flink
* does drain the remaining timers after close(), this is not sufficient because no new timers
* are allowed to be scheduled anymore. This breaks Beam pipelines which rely on all processing
* timers to be scheduled and executed.
*/
void processPendingProcessingTimeTimers() {
final KeyedStateBackend<Object> keyedStateBackend = getKeyedStateBackend();
final InternalPriorityQueue<InternalTimer<Object, TimerData>> processingTimeTimersQueue =
Workarounds.retrieveInternalProcessingTimerQueue(timerService);
InternalTimer<Object, TimerData> internalTimer;
while ((internalTimer = processingTimeTimersQueue.poll()) != null) {
keyedStateBackend.setCurrentKey(internalTimer.getKey());
TimerData timer = internalTimer.getNamespace();
checkInvokeStartBundle();
fireTimer(timer);
}
}
/** Keeps a minimum output timestamp across all event timers. */
private void onNewEventTimer(TimerData newTimer) {
Preconditions.checkState(
newTimer.getDomain() == TimeDomain.EVENT_TIME,
"Timer with id %s is not an event time timer!",
newTimer.getTimerId());
if (timerUsesOutputTimestamp(newTimer)) {
keyedStateInternals.addWatermarkHoldUsage(newTimer.getOutputTimestamp());
}
}
private void onRemovedEventTimer(TimerData removedTimer) {
Preconditions.checkState(
removedTimer.getDomain() == TimeDomain.EVENT_TIME,
"Timer with id %s is not an event time timer!",
removedTimer.getTimerId());
if (timerUsesOutputTimestamp(removedTimer)) {
keyedStateInternals.removeWatermarkHoldUsage(removedTimer.getOutputTimestamp());
}
}
private void populateOutputTimestampQueue() {
final KeyedStateBackend<Object> keyedStateBackend = getKeyedStateBackend();
final Object currentKey = keyedStateBackend.getCurrentKey();
try (Stream<Object> keys =
keyedStateBackend.getKeys(PENDING_TIMERS_STATE_NAME, VoidNamespace.INSTANCE)) {
keys.forEach(
key -> {
keyedStateBackend.setCurrentKey(key);
try {
for (TimerData timerData : pendingTimersById.values()) {
if (timerData.getDomain() == TimeDomain.EVENT_TIME) {
if (timerUsesOutputTimestamp(timerData)) {
keyedStateInternals.addWatermarkHoldUsage(timerData.getOutputTimestamp());
}
}
}
} catch (Exception e) {
throw new RuntimeException(
"Exception while reading set of timers for key: " + key, e);
}
});
} finally {
if (currentKey != null) {
keyedStateBackend.setCurrentKey(currentKey);
}
}
}
private boolean timerUsesOutputTimestamp(TimerData timer) {
return timer.getOutputTimestamp().isBefore(timer.getTimestamp());
}
@Override
public void setTimer(
StateNamespace namespace,
String timerId,
String timerFamilyId,
Instant target,
Instant outputTimestamp,
TimeDomain timeDomain) {
setTimer(
TimerData.of(timerId, timerFamilyId, namespace, target, outputTimestamp, timeDomain));
}
/**
* @deprecated use {@link #setTimer(StateNamespace, String, String, Instant, Instant,
* TimeDomain)}.
*/
@Deprecated
@Override
private void registerTimer(TimerData timer, String contextTimerId) throws Exception {
pendingTimersById.put(contextTimerId, timer);
long time = timer.getTimestamp().getMillis();
switch (timer.getDomain()) {
case EVENT_TIME:
timerService.registerEventTimeTimer(timer, adjustTimestampForFlink(time));
onNewEventTimer(timer);
break;
case PROCESSING_TIME:
case SYNCHRONIZED_PROCESSING_TIME:
timerService.registerProcessingTimeTimer(timer, adjustTimestampForFlink(time));
break;
default:
throw new UnsupportedOperationException("Unsupported time domain: " + timer.getDomain());
}
}
/**
* Looks up a timer by its id. This is necessary to support canceling existing timers with the
* same id. Flink does not provide this functionality.
*/
private void cancelPendingTimerById(String contextTimerId) throws Exception {
TimerData oldTimer = pendingTimersById.get(contextTimerId);
if (oldTimer != null) {
deleteTimerInternal(oldTimer);
}
}
/**
* Hook which must be called when a timer is fired or deleted to perform cleanup. Note: Make
* sure that the state backend key is set correctly. It is best to run this in the fireTimer()
* method.
*/
void onFiredOrDeletedTimer(TimerData timer) {
try {
pendingTimersById.remove(getContextTimerId(timer.getTimerId(), timer.getNamespace()));
if (timer.getDomain() == TimeDomain.EVENT_TIME) {
onRemovedEventTimer(timer);
}
} catch (Exception e) {
throw new RuntimeException("Failed to cleanup pending timers state.", e);
}
}
/** @deprecated use {@link #deleteTimer(StateNamespace, String, TimeDomain)}. */
@Deprecated
@Override
public void deleteTimer(StateNamespace namespace, String timerId, String timerFamilyId) {
throw new UnsupportedOperationException("Canceling of a timer by ID is not yet supported.");
}
@Override
public void deleteTimer(StateNamespace namespace, String timerId, TimeDomain timeDomain) {
try {
cancelPendingTimerById(getContextTimerId(timerId, namespace));
} catch (Exception e) {
throw new RuntimeException("Failed to cancel timer", e);
}
}
/** @deprecated use {@link #deleteTimer(StateNamespace, String, TimeDomain)}. */
@Override
@Deprecated
public void deleteTimer(TimerData timer) {
deleteTimer(timer.getNamespace(), timer.getTimerId(), timer.getDomain());
}
void deleteTimerInternal(TimerData timer) {
long time = timer.getTimestamp().getMillis();
switch (timer.getDomain()) {
case EVENT_TIME:
timerService.deleteEventTimeTimer(timer, adjustTimestampForFlink(time));
break;
case PROCESSING_TIME:
case SYNCHRONIZED_PROCESSING_TIME:
timerService.deleteProcessingTimeTimer(timer, adjustTimestampForFlink(time));
break;
default:
throw new UnsupportedOperationException("Unsupported time domain: " + timer.getDomain());
}
onFiredOrDeletedTimer(timer);
}
@Override
public Instant currentProcessingTime() {
return new Instant(timerService.currentProcessingTime());
}
@Nullable
@Override
public Instant currentSynchronizedProcessingTime() {
return new Instant(timerService.currentProcessingTime());
}
@Override
public Instant currentInputWatermarkTime() {
return new Instant(getEffectiveInputWatermark());
}
@Nullable
@Override
public Instant currentOutputWatermarkTime() {
return new Instant(currentOutputWatermark);
}
/**
* Check whether event time timers lower or equal to the given timestamp exist. Caution: This is
* scoped by the current key.
*/
public boolean hasPendingEventTimeTimers(long maxTimestamp) throws Exception {
for (TimerData timer : pendingTimersById.values()) {
if (timer.getDomain() == TimeDomain.EVENT_TIME
&& timer.getTimestamp().getMillis() <= maxTimestamp) {
return true;
}
}
return false;
}
/** Unique contextual id of a timer. Used to look up any existing timers in a context. */
private String getContextTimerId(String timerId, StateNamespace namespace) {
return timerId + namespace.stringKey();
}
} | class FlinkTimerInternals implements TimerInternals {
private static final String PENDING_TIMERS_STATE_NAME = "pending-timers";
/**
* Pending Timers (=not been fired yet) by context id. The id is generated from the state
* namespace of the timer and the timer's id. Necessary for supporting removal of existing
* timers. In Flink removal of timers can only be done by providing id and time of the timer.
*
* <p>CAUTION: This map is scoped by the current active key. Do not attempt to perform any
* calculations which span across keys.
*/
@VisibleForTesting final MapState<String, TimerData> pendingTimersById;
private FlinkTimerInternals() {
MapStateDescriptor<String, TimerData> pendingTimersByIdStateDescriptor =
new MapStateDescriptor<>(
PENDING_TIMERS_STATE_NAME,
new StringSerializer(),
new CoderTypeSerializer<>(timerCoder));
this.pendingTimersById = getKeyedStateStore().getMapState(pendingTimersByIdStateDescriptor);
populateOutputTimestampQueue();
}
/**
* Processes all pending processing timers. This is intended for use during shutdown. From Flink
* 1.10 on, processing timer execution is stopped when the operator is closed. This leads to
* problems for applications which assume all pending timers will be completed. Although Flink
* does drain the remaining timers after close(), this is not sufficient because no new timers
* are allowed to be scheduled anymore. This breaks Beam pipelines which rely on all processing
* timers to be scheduled and executed.
*/
void processPendingProcessingTimeTimers() {
final KeyedStateBackend<Object> keyedStateBackend = getKeyedStateBackend();
final InternalPriorityQueue<InternalTimer<Object, TimerData>> processingTimeTimersQueue =
Workarounds.retrieveInternalProcessingTimerQueue(timerService);
InternalTimer<Object, TimerData> internalTimer;
while ((internalTimer = processingTimeTimersQueue.poll()) != null) {
keyedStateBackend.setCurrentKey(internalTimer.getKey());
TimerData timer = internalTimer.getNamespace();
checkInvokeStartBundle();
fireTimer(timer);
}
}
/** Keeps a minimum output timestamp across all event timers. */
private void onNewEventTimer(TimerData newTimer) {
Preconditions.checkState(
newTimer.getDomain() == TimeDomain.EVENT_TIME,
"Timer with id %s is not an event time timer!",
newTimer.getTimerId());
if (timerUsesOutputTimestamp(newTimer)) {
keyedStateInternals.addWatermarkHoldUsage(newTimer.getOutputTimestamp());
}
}
private void onRemovedEventTimer(TimerData removedTimer) {
Preconditions.checkState(
removedTimer.getDomain() == TimeDomain.EVENT_TIME,
"Timer with id %s is not an event time timer!",
removedTimer.getTimerId());
if (timerUsesOutputTimestamp(removedTimer)) {
keyedStateInternals.removeWatermarkHoldUsage(removedTimer.getOutputTimestamp());
}
}
private void populateOutputTimestampQueue() {
final KeyedStateBackend<Object> keyedStateBackend = getKeyedStateBackend();
final Object currentKey = keyedStateBackend.getCurrentKey();
try (Stream<Object> keys =
keyedStateBackend.getKeys(PENDING_TIMERS_STATE_NAME, VoidNamespace.INSTANCE)) {
keys.forEach(
key -> {
keyedStateBackend.setCurrentKey(key);
try {
for (TimerData timerData : pendingTimersById.values()) {
if (timerData.getDomain() == TimeDomain.EVENT_TIME) {
if (timerUsesOutputTimestamp(timerData)) {
keyedStateInternals.addWatermarkHoldUsage(timerData.getOutputTimestamp());
}
}
}
} catch (Exception e) {
throw new RuntimeException(
"Exception while reading set of timers for key: " + key, e);
}
});
} finally {
if (currentKey != null) {
keyedStateBackend.setCurrentKey(currentKey);
}
}
}
private boolean timerUsesOutputTimestamp(TimerData timer) {
return timer.getOutputTimestamp().isBefore(timer.getTimestamp());
}
@Override
public void setTimer(
StateNamespace namespace,
String timerId,
String timerFamilyId,
Instant target,
Instant outputTimestamp,
TimeDomain timeDomain) {
setTimer(
TimerData.of(timerId, timerFamilyId, namespace, target, outputTimestamp, timeDomain));
}
/**
* @deprecated use {@link #setTimer(StateNamespace, String, String, Instant, Instant,
* TimeDomain)}.
*/
@Deprecated
@Override
private void registerTimer(TimerData timer, String contextTimerId) throws Exception {
pendingTimersById.put(contextTimerId, timer);
long time = timer.getTimestamp().getMillis();
switch (timer.getDomain()) {
case EVENT_TIME:
timerService.registerEventTimeTimer(timer, adjustTimestampForFlink(time));
onNewEventTimer(timer);
break;
case PROCESSING_TIME:
case SYNCHRONIZED_PROCESSING_TIME:
timerService.registerProcessingTimeTimer(timer, adjustTimestampForFlink(time));
break;
default:
throw new UnsupportedOperationException("Unsupported time domain: " + timer.getDomain());
}
}
/**
* Looks up a timer by its id. This is necessary to support canceling existing timers with the
* same id. Flink does not provide this functionality.
*/
private void cancelPendingTimerById(String contextTimerId) throws Exception {
TimerData oldTimer = pendingTimersById.get(contextTimerId);
if (oldTimer != null) {
deleteTimerInternal(oldTimer);
}
}
/**
* Hook which must be called when a timer is fired or deleted to perform cleanup. Note: Make
* sure that the state backend key is set correctly. It is best to run this in the fireTimer()
* method.
*/
void onFiredOrDeletedTimer(TimerData timer) {
try {
pendingTimersById.remove(getContextTimerId(timer.getTimerId(), timer.getNamespace()));
if (timer.getDomain() == TimeDomain.EVENT_TIME) {
onRemovedEventTimer(timer);
}
} catch (Exception e) {
throw new RuntimeException("Failed to cleanup pending timers state.", e);
}
}
/** @deprecated use {@link #deleteTimer(StateNamespace, String, TimeDomain)}. */
@Deprecated
@Override
public void deleteTimer(StateNamespace namespace, String timerId, String timerFamilyId) {
throw new UnsupportedOperationException("Canceling of a timer by ID is not yet supported.");
}
@Override
public void deleteTimer(StateNamespace namespace, String timerId, TimeDomain timeDomain) {
try {
cancelPendingTimerById(getContextTimerId(timerId, namespace));
} catch (Exception e) {
throw new RuntimeException("Failed to cancel timer", e);
}
}
/** @deprecated use {@link #deleteTimer(StateNamespace, String, TimeDomain)}. */
@Override
@Deprecated
public void deleteTimer(TimerData timer) {
deleteTimer(timer.getNamespace(), timer.getTimerId(), timer.getDomain());
}
void deleteTimerInternal(TimerData timer) {
long time = timer.getTimestamp().getMillis();
switch (timer.getDomain()) {
case EVENT_TIME:
timerService.deleteEventTimeTimer(timer, adjustTimestampForFlink(time));
break;
case PROCESSING_TIME:
case SYNCHRONIZED_PROCESSING_TIME:
timerService.deleteProcessingTimeTimer(timer, adjustTimestampForFlink(time));
break;
default:
throw new UnsupportedOperationException("Unsupported time domain: " + timer.getDomain());
}
onFiredOrDeletedTimer(timer);
}
@Override
public Instant currentProcessingTime() {
return new Instant(timerService.currentProcessingTime());
}
@Nullable
@Override
public Instant currentSynchronizedProcessingTime() {
return new Instant(timerService.currentProcessingTime());
}
@Override
public Instant currentInputWatermarkTime() {
return new Instant(getEffectiveInputWatermark());
}
@Nullable
@Override
public Instant currentOutputWatermarkTime() {
return new Instant(currentOutputWatermark);
}
/**
* Check whether event time timers lower or equal to the given timestamp exist. Caution: This is
* scoped by the current key.
*/
public boolean hasPendingEventTimeTimers(long maxTimestamp) throws Exception {
for (TimerData timer : pendingTimersById.values()) {
if (timer.getDomain() == TimeDomain.EVENT_TIME
&& timer.getTimestamp().getMillis() <= maxTimestamp) {
return true;
}
}
return false;
}
/** Unique contextual id of a timer. Used to look up any existing timers in a context. */
private String getContextTimerId(String timerId, StateNamespace namespace) {
return timerId + namespace.stringKey();
}
} |
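For context on the review exchange in the FlinkTimerInternals row that ends here (the decision not to push the GlobalWindow max-timestamp handling into CleanupTimer): a rough sketch of the alternative that was apparently rejected, in which the cleanup component caps its own garbage-collection timestamp before going through the regular setTimer(namespace, timerId, ...) API, so setTimer() never has to special-case GC timer ids. The class name, timer id value, and capping rule below are illustrative assumptions, not the actual Beam/Flink implementation.

```java
// Illustrative sketch only, not the code from this row: cap the GC timestamp where the
// cleanup timer is created, leaving FlinkTimerInternals.setTimer() free of GC special cases.
import org.apache.beam.runners.core.StateNamespace;
import org.apache.beam.runners.core.TimerInternals;
import org.apache.beam.sdk.state.TimeDomain;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.joda.time.Instant;

class CappingCleanupTimer {

  // Hypothetical id; the real constant lives in StatefulDoFnRunner.TimeInternalsCleanupTimer.
  private static final String GC_TIMER_ID = "__cleanup_timer";

  private final TimerInternals timerInternals;

  CappingCleanupTimer(TimerInternals timerInternals) {
    this.timerInternals = timerInternals;
  }

  // Sets the cleanup timer, never later than the maximum representable timestamp.
  void setCleanupTimer(StateNamespace namespace, Instant gcTime) {
    Instant capped =
        gcTime.isAfter(GlobalWindow.INSTANCE.maxTimestamp())
            ? GlobalWindow.INSTANCE.maxTimestamp()
            : gcTime;
    timerInternals.setTimer(
        namespace, GC_TIMER_ID, "" /* timerFamilyId */, capped, capped, TimeDomain.EVENT_TIME);
  }
}
```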
This kind of thing is exactly why I LOVE AssertJ :) | public void assertGeneratedResources() throws IOException {
Path kubernetesDir = prodModeTestResults.getBuildDir().resolve("kubernetes");
assertThat(kubernetesDir)
.isDirectoryContaining(p -> p.getFileName().endsWith("kubernetes.json"))
.isDirectoryContaining(p -> p.getFileName().endsWith("kubernetes.yml"));
List<HasMetadata> kubernetesList = DeserializationUtil.deserializeAsList(kubernetesDir.resolve("kubernetes.yml"));
assertThat(kubernetesList).filteredOn(h -> "Role".equals(h.getKind())).hasSize(1);
assertThat(kubernetesList).anySatisfy(res -> {
assertThat(res).isInstanceOfSatisfying(Role.class, role -> {
assertThat(role.getMetadata()).satisfies(m -> {
assertThat(m.getName()).isEqualTo("view-secrets");
});
assertThat(role.getRules()).hasOnlyOneElementSatisfying(r -> {
assertThat(r).isInstanceOfSatisfying(PolicyRule.class, rule -> {
assertThat(rule.getApiGroups()).containsExactly("");
assertThat(rule.getResources()).containsExactly("secrets");
assertThat(rule.getVerbs()).containsExactly("get", "list", "watch");
});
});
});
});
assertThat(kubernetesList).filteredOn(h -> "RoleBinding".equals(h.getKind())).hasSize(2);
assertThat(kubernetesList).anySatisfy(res -> {
assertThat(res).isInstanceOfSatisfying(RoleBinding.class, roleBinding -> {
assertThat(roleBinding.getMetadata()).satisfies(m -> {
assertThat(m.getName()).isEqualTo("kubernetes-config-with-secrets:view-secrets");
});
assertThat(roleBinding.getRoleRef().getKind()).isEqualTo("Role");
assertThat(roleBinding.getRoleRef().getName()).isEqualTo("view-secrets");
assertThat(roleBinding.getSubjects()).hasOnlyOneElementSatisfying(subject -> {
assertThat(subject.getKind()).isEqualTo("ServiceAccount");
assertThat(subject.getName()).isEqualTo("kubernetes-config-with-secrets");
});
});
});
assertThat(kubernetesList).anySatisfy(res -> {
assertThat(res).isInstanceOfSatisfying(RoleBinding.class, roleBinding -> {
assertThat(roleBinding.getMetadata()).satisfies(m -> {
assertThat(m.getName()).isEqualTo("kubernetes-config-with-secrets:view");
});
assertThat(roleBinding.getRoleRef().getKind()).isEqualTo("ClusterRole");
assertThat(roleBinding.getRoleRef().getName()).isEqualTo("view");
assertThat(roleBinding.getSubjects()).hasOnlyOneElementSatisfying(subject -> {
assertThat(subject.getKind()).isEqualTo("ServiceAccount");
assertThat(subject.getName()).isEqualTo("kubernetes-config-with-secrets");
});
});
});
} | assertThat(rule.getVerbs()).containsExactly("get", "list", "watch"); | public void assertGeneratedResources() throws IOException {
Path kubernetesDir = prodModeTestResults.getBuildDir().resolve("kubernetes");
assertThat(kubernetesDir)
.isDirectoryContaining(p -> p.getFileName().endsWith("kubernetes.json"))
.isDirectoryContaining(p -> p.getFileName().endsWith("kubernetes.yml"));
List<HasMetadata> kubernetesList = DeserializationUtil.deserializeAsList(kubernetesDir.resolve("kubernetes.yml"));
assertThat(kubernetesList).filteredOn(h -> "Role".equals(h.getKind())).hasSize(1);
assertThat(kubernetesList).anySatisfy(res -> {
assertThat(res).isInstanceOfSatisfying(Role.class, role -> {
assertThat(role.getMetadata()).satisfies(m -> {
assertThat(m.getName()).isEqualTo("view-secrets");
});
assertThat(role.getRules()).hasOnlyOneElementSatisfying(r -> {
assertThat(r).isInstanceOfSatisfying(PolicyRule.class, rule -> {
assertThat(rule.getApiGroups()).containsExactly("");
assertThat(rule.getResources()).containsExactly("secrets");
assertThat(rule.getVerbs()).containsExactly("get", "list", "watch");
});
});
});
});
assertThat(kubernetesList).filteredOn(h -> "RoleBinding".equals(h.getKind())).hasSize(2);
assertThat(kubernetesList).anySatisfy(res -> {
assertThat(res).isInstanceOfSatisfying(RoleBinding.class, roleBinding -> {
assertThat(roleBinding.getMetadata()).satisfies(m -> {
assertThat(m.getName()).isEqualTo("kubernetes-config-with-secrets:view-secrets");
});
assertThat(roleBinding.getRoleRef().getKind()).isEqualTo("Role");
assertThat(roleBinding.getRoleRef().getName()).isEqualTo("view-secrets");
assertThat(roleBinding.getSubjects()).hasOnlyOneElementSatisfying(subject -> {
assertThat(subject.getKind()).isEqualTo("ServiceAccount");
assertThat(subject.getName()).isEqualTo("kubernetes-config-with-secrets");
});
});
});
assertThat(kubernetesList).anySatisfy(res -> {
assertThat(res).isInstanceOfSatisfying(RoleBinding.class, roleBinding -> {
assertThat(roleBinding.getMetadata()).satisfies(m -> {
assertThat(m.getName()).isEqualTo("kubernetes-config-with-secrets:view");
});
assertThat(roleBinding.getRoleRef().getKind()).isEqualTo("ClusterRole");
assertThat(roleBinding.getRoleRef().getName()).isEqualTo("view");
assertThat(roleBinding.getSubjects()).hasOnlyOneElementSatisfying(subject -> {
assertThat(subject.getKind()).isEqualTo("ServiceAccount");
assertThat(subject.getName()).isEqualTo("kubernetes-config-with-secrets");
});
});
});
} | class KubernetesConfigWithSecretsTest {
@RegisterExtension
static final QuarkusProdModeTest config = new QuarkusProdModeTest()
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class).addClasses(GreetingResource.class))
.setApplicationName("kubernetes-config-with-secrets")
.setApplicationVersion("0.1-SNAPSHOT")
.withConfigurationResource("kubernetes-config-with-secrets.properties")
.setForcedDependencies(Collections.singletonList(
new AppArtifact("io.quarkus", "quarkus-kubernetes-config", Version.getVersion())));
@ProdBuildResults
private ProdModeTestResults prodModeTestResults;
@Test
} | class KubernetesConfigWithSecretsTest {
@RegisterExtension
static final QuarkusProdModeTest config = new QuarkusProdModeTest()
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class).addClasses(GreetingResource.class))
.setApplicationName("kubernetes-config-with-secrets")
.setApplicationVersion("0.1-SNAPSHOT")
.withConfigurationResource("kubernetes-config-with-secrets.properties")
.setForcedDependencies(Collections.singletonList(
new AppArtifact("io.quarkus", "quarkus-kubernetes-config", Version.getVersion())));
@ProdBuildResults
private ProdModeTestResults prodModeTestResults;
@Test
} |
Ok, it seems to be an IDE-specific thing. I can reproduce the failure when calling the test from the command line: ```bash $ /mvnw -pl flink-runtime test -Dtest=ExecutingTest -Dfast ``` | private Executing build(MockExecutingContext ctx) {
executionGraph.transitionToRunning();
try {
return new Executing(
executionGraph,
getExecutionGraphHandler(executionGraph, ctx.getMainThreadExecutor()),
operatorCoordinatorHandler,
log,
ctx,
ClassLoader.getSystemClassLoader(),
new ArrayList<>(),
scalingIntervalMin,
scalingIntervalMax,
lastRescale);
} finally {
Preconditions.checkState(
!ctx.hadStateTransition,
"State construction is an on-going state transition, during which no further transitions are allowed.");
}
} | try { | private Executing build(MockExecutingContext ctx) {
executionGraph.transitionToRunning();
try {
return new Executing(
executionGraph,
getExecutionGraphHandler(executionGraph, ctx.getMainThreadExecutor()),
operatorCoordinatorHandler,
log,
ctx,
ClassLoader.getSystemClassLoader(),
new ArrayList<>(),
scalingIntervalMin,
scalingIntervalMax,
lastRescale);
} finally {
Preconditions.checkState(
!ctx.hadStateTransition,
"State construction is an on-going state transition, during which no further transitions are allowed.");
}
} | class ExecutingStateBuilder {
private ExecutionGraph executionGraph =
TestingDefaultExecutionGraphBuilder.newBuilder()
.build(EXECUTOR_RESOURCE.getExecutor());
private OperatorCoordinatorHandler operatorCoordinatorHandler;
private Duration scalingIntervalMin = Duration.ZERO;
@Nullable private Duration scalingIntervalMax;
private Instant lastRescale = Instant.now();
private ExecutingStateBuilder() throws JobException, JobExecutionException {
operatorCoordinatorHandler = new TestingOperatorCoordinatorHandler();
}
public ExecutingStateBuilder setExecutionGraph(ExecutionGraph executionGraph) {
this.executionGraph = executionGraph;
return this;
}
public ExecutingStateBuilder setOperatorCoordinatorHandler(
OperatorCoordinatorHandler operatorCoordinatorHandler) {
this.operatorCoordinatorHandler = operatorCoordinatorHandler;
return this;
}
public ExecutingStateBuilder setScalingIntervalMin(Duration scalingIntervalMin) {
this.scalingIntervalMin = scalingIntervalMin;
return this;
}
public ExecutingStateBuilder setScalingIntervalMax(Duration scalingIntervalMax) {
this.scalingIntervalMax = scalingIntervalMax;
return this;
}
public ExecutingStateBuilder setLastRescale(Instant lastRescale) {
this.lastRescale = lastRescale;
return this;
}
} | class ExecutingStateBuilder {
private ExecutionGraph executionGraph =
TestingDefaultExecutionGraphBuilder.newBuilder()
.build(EXECUTOR_RESOURCE.getExecutor());
private OperatorCoordinatorHandler operatorCoordinatorHandler;
private Duration scalingIntervalMin = Duration.ZERO;
@Nullable private Duration scalingIntervalMax;
private Instant lastRescale = Instant.now();
private ExecutingStateBuilder() throws JobException, JobExecutionException {
operatorCoordinatorHandler = new TestingOperatorCoordinatorHandler();
}
public ExecutingStateBuilder setExecutionGraph(ExecutionGraph executionGraph) {
this.executionGraph = executionGraph;
return this;
}
public ExecutingStateBuilder setOperatorCoordinatorHandler(
OperatorCoordinatorHandler operatorCoordinatorHandler) {
this.operatorCoordinatorHandler = operatorCoordinatorHandler;
return this;
}
public ExecutingStateBuilder setScalingIntervalMin(Duration scalingIntervalMin) {
this.scalingIntervalMin = scalingIntervalMin;
return this;
}
public ExecutingStateBuilder setScalingIntervalMax(Duration scalingIntervalMax) {
this.scalingIntervalMax = scalingIntervalMax;
return this;
}
public ExecutingStateBuilder setLastRescale(Instant lastRescale) {
this.lastRescale = lastRescale;
return this;
}
} |
this should say that they could not be loaded from the flink configuration | public static Configuration getHadoopConfiguration(org.apache.flink.configuration.Configuration flinkConfiguration) {
Configuration retConf = new Configuration();
final String hdfsDefaultPath = flinkConfiguration.getString(ConfigConstants
.HDFS_DEFAULT_CONFIG, null);
if (hdfsDefaultPath != null) {
retConf.addResource(new org.apache.hadoop.fs.Path(hdfsDefaultPath));
} else {
LOG.debug("Cannot find hdfs-default configuration file");
}
final String hdfsSitePath = flinkConfiguration.getString(ConfigConstants.HDFS_SITE_CONFIG, null);
if (hdfsSitePath != null) {
retConf.addResource(new org.apache.hadoop.fs.Path(hdfsSitePath));
} else {
LOG.debug("Cannot find hdfs-site configuration file");
}
String[] possibleHadoopConfPaths = new String[4];
possibleHadoopConfPaths[0] = flinkConfiguration.getString(ConfigConstants.PATH_HADOOP_CONFIG, null);
possibleHadoopConfPaths[1] = System.getenv("HADOOP_CONF_DIR");
if (System.getenv("HADOOP_HOME") != null) {
possibleHadoopConfPaths[2] = System.getenv("HADOOP_HOME") + "/conf";
possibleHadoopConfPaths[3] = System.getenv("HADOOP_HOME") + "/etc/hadoop";
}
for (String possibleHadoopConfPath : possibleHadoopConfPaths) {
if (possibleHadoopConfPath != null) {
if (new File(possibleHadoopConfPath).exists()) {
if (new File(possibleHadoopConfPath + "/core-site.xml").exists()) {
retConf.addResource(new org.apache.hadoop.fs.Path(possibleHadoopConfPath + "/core-site.xml"));
if (LOG.isDebugEnabled()) {
LOG.debug("Adding " + possibleHadoopConfPath + "/core-site.xml to hadoop configuration");
}
}
if (new File(possibleHadoopConfPath + "/hdfs-site.xml").exists()) {
retConf.addResource(new org.apache.hadoop.fs.Path(possibleHadoopConfPath + "/hdfs-site.xml"));
if (LOG.isDebugEnabled()) {
LOG.debug("Adding " + possibleHadoopConfPath + "/hdfs-site.xml to hadoop configuration");
}
}
}
}
}
return retConf;
} | LOG.debug("Cannot find hdfs-default configuration file"); | public static Configuration getHadoopConfiguration(org.apache.flink.configuration.Configuration flinkConfiguration) {
Configuration result = new Configuration();
boolean foundHadoopConfiguration = false;
final String hdfsDefaultPath =
flinkConfiguration.getString(ConfigConstants.HDFS_DEFAULT_CONFIG, null);
if (hdfsDefaultPath != null) {
result.addResource(new org.apache.hadoop.fs.Path(hdfsDefaultPath));
LOG.debug("Using hdfs-default configuration-file path form Flink config: {}", hdfsDefaultPath);
foundHadoopConfiguration = true;
} else {
LOG.debug("Cannot find hdfs-default configuration-file path in Flink config.");
}
final String hdfsSitePath = flinkConfiguration.getString(ConfigConstants.HDFS_SITE_CONFIG, null);
if (hdfsSitePath != null) {
result.addResource(new org.apache.hadoop.fs.Path(hdfsSitePath));
LOG.debug("Using hdfs-site configuration-file path form Flink config: {}", hdfsSitePath);
foundHadoopConfiguration = true;
} else {
LOG.debug("Cannot find hdfs-site configuration-file path in Flink config.");
}
String[] possibleHadoopConfPaths = new String[4];
possibleHadoopConfPaths[0] = flinkConfiguration.getString(ConfigConstants.PATH_HADOOP_CONFIG, null);
possibleHadoopConfPaths[1] = System.getenv("HADOOP_CONF_DIR");
if (System.getenv("HADOOP_HOME") != null) {
possibleHadoopConfPaths[2] = System.getenv("HADOOP_HOME") + "/conf";
possibleHadoopConfPaths[3] = System.getenv("HADOOP_HOME") + "/etc/hadoop";
}
for (String possibleHadoopConfPath : possibleHadoopConfPaths) {
if (possibleHadoopConfPath != null) {
if (new File(possibleHadoopConfPath).exists()) {
if (new File(possibleHadoopConfPath + "/core-site.xml").exists()) {
result.addResource(new org.apache.hadoop.fs.Path(possibleHadoopConfPath + "/core-site.xml"));
LOG.debug("Adding " + possibleHadoopConfPath + "/core-site.xml to hadoop configuration");
foundHadoopConfiguration = true;
}
if (new File(possibleHadoopConfPath + "/hdfs-site.xml").exists()) {
result.addResource(new org.apache.hadoop.fs.Path(possibleHadoopConfPath + "/hdfs-site.xml"));
LOG.debug("Adding " + possibleHadoopConfPath + "/hdfs-site.xml to hadoop configuration");
foundHadoopConfiguration = true;
}
}
}
}
if (!foundHadoopConfiguration) {
LOG.debug("Could not find Hadoop configuration via any of the supported methods " +
"(Flink configuration, environment variables).");
}
return result;
} | class HadoopUtils {
private static final Logger LOG = LoggerFactory.getLogger(HadoopUtils.class);
private static final Text HDFS_DELEGATION_TOKEN_KIND = new Text("HDFS_DELEGATION_TOKEN");
/**
* Indicates whether the current user has an HDFS delegation token.
*/
public static boolean hasHDFSDelegationToken() throws Exception {
UserGroupInformation loginUser = UserGroupInformation.getCurrentUser();
Collection<Token<? extends TokenIdentifier>> usrTok = loginUser.getTokens();
for (Token<? extends TokenIdentifier> token : usrTok) {
if (token.getKind().equals(HDFS_DELEGATION_TOKEN_KIND)) {
return true;
}
}
return false;
}
} | class HadoopUtils {
private static final Logger LOG = LoggerFactory.getLogger(HadoopUtils.class);
private static final Text HDFS_DELEGATION_TOKEN_KIND = new Text("HDFS_DELEGATION_TOKEN");
/**
* Indicates whether the current user has an HDFS delegation token.
*/
public static boolean hasHDFSDelegationToken() throws Exception {
UserGroupInformation loginUser = UserGroupInformation.getCurrentUser();
Collection<Token<? extends TokenIdentifier>> usrTok = loginUser.getTokens();
for (Token<? extends TokenIdentifier> token : usrTok) {
if (token.getKind().equals(HDFS_DELEGATION_TOKEN_KIND)) {
return true;
}
}
return false;
}
} |
I thought about it, but the methods are really small so it's worth decreasing readability here | private JobGraph getStatefulParDoAfterGroupByKeyChainingJobGraph(boolean stablePartitioning) {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
final FlinkStreamingPipelineTranslator translator =
new FlinkStreamingPipelineTranslator(env, PipelineOptionsFactory.create());
final PipelineOptions pipelineOptions = PipelineOptionsFactory.create();
pipelineOptions.setRunner(FlinkRunner.class);
final Pipeline pipeline = Pipeline.create(pipelineOptions);
PCollection<KV<String, Iterable<Long>>> aggregate =
pipeline
.apply(
Create.of(KV.of("foo", 1L), KV.of("bar", 1L))
.withCoder(KvCoder.of(StringUtf8Coder.of(), VarLongCoder.of())))
.apply(GroupByKey.create());
if (!stablePartitioning) {
aggregate = aggregate.apply(ParDo.of(new StatelessIdentityDoFn<>()));
}
aggregate.apply(ParDo.of(new StatefulNoopDoFn<>()));
translator.translate(pipeline);
return env.getStreamGraph().getJobGraph();
} | .apply(GroupByKey.create()); | private JobGraph getStatefulParDoAfterGroupByKeyChainingJobGraph(boolean stablePartitioning) {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
final FlinkStreamingPipelineTranslator translator =
new FlinkStreamingPipelineTranslator(env, PipelineOptionsFactory.create());
final PipelineOptions pipelineOptions = PipelineOptionsFactory.create();
pipelineOptions.setRunner(FlinkRunner.class);
final Pipeline pipeline = Pipeline.create(pipelineOptions);
PCollection<KV<String, Iterable<Long>>> aggregate =
pipeline
.apply(
Create.of(KV.of("foo", 1L), KV.of("bar", 1L))
.withCoder(KvCoder.of(StringUtf8Coder.of(), VarLongCoder.of())))
.apply(GroupByKey.create());
if (!stablePartitioning) {
aggregate = aggregate.apply(ParDo.of(new StatelessIdentityDoFn<>()));
}
aggregate.apply(ParDo.of(new StatefulNoopDoFn<>()));
translator.translate(pipeline);
return env.getStreamGraph().getJobGraph();
} | class FlinkStreamingPipelineTranslatorTest {
@Test
public void testAutoBalanceShardKeyResolvesMaxParallelism() {
int parallelism = 3;
assertThat(
new FlinkAutoBalancedShardKeyShardingFunction<>(parallelism, -1, StringUtf8Coder.of())
.getMaxParallelism(),
equalTo(KeyGroupRangeAssignment.computeDefaultMaxParallelism(parallelism)));
assertThat(
new FlinkAutoBalancedShardKeyShardingFunction<>(parallelism, 0, StringUtf8Coder.of())
.getMaxParallelism(),
equalTo(KeyGroupRangeAssignment.computeDefaultMaxParallelism(parallelism)));
}
@Test
public void testAutoBalanceShardKeyCacheIsNotSerialized() throws Exception {
FlinkAutoBalancedShardKeyShardingFunction<String, String> fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(2, 2, StringUtf8Coder.of());
assertNull(fn.getCache());
fn.assignShardKey("target/destination1", "one", 10);
fn.assignShardKey("target/destination2", "two", 10);
assertThat(fn.getCache().size(), equalTo(2));
assertThat(SerializableUtils.clone(fn).getCache(), nullValue());
}
@Test
public void testAutoBalanceShardKeyCacheIsStable() throws Exception {
int numShards = 50;
FlinkAutoBalancedShardKeyShardingFunction<String, String> fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(
numShards / 2, numShards * 2, StringUtf8Coder.of());
List<KV<String, String>> inputs = Lists.newArrayList();
for (int i = 0; i < numShards * 100; i++) {
inputs.add(KV.of("target/destination/1", UUID.randomUUID().toString()));
inputs.add(KV.of("target/destination/2", UUID.randomUUID().toString()));
inputs.add(KV.of("target/destination/3", UUID.randomUUID().toString()));
}
Map<KV<String, Integer>, ShardedKey<Integer>> generatedKeys = new HashMap<>();
for (KV<String, String> input : inputs) {
ShardedKey<Integer> shardKey = fn.assignShardKey(input.getKey(), input.getValue(), numShards);
generatedKeys.put(KV.of(input.getKey(), shardKey.getShardNumber()), shardKey);
}
fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(
numShards / 2, numShards * 2, StringUtf8Coder.of());
Collections.shuffle(inputs);
for (KV<String, String> input : inputs) {
ShardedKey<Integer> shardKey = fn.assignShardKey(input.getKey(), input.getValue(), numShards);
ShardedKey<Integer> expectedShardKey =
generatedKeys.get(KV.of(input.getKey(), shardKey.getShardNumber()));
if (expectedShardKey != null) {
assertThat(shardKey, equalTo(expectedShardKey));
}
}
}
@Test
public void testAutoBalanceShardKeyCacheMaxSize() throws Exception {
FlinkAutoBalancedShardKeyShardingFunction<String, String> fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(2, 2, StringUtf8Coder.of());
for (int i = 0; i < FlinkAutoBalancedShardKeyShardingFunction.CACHE_MAX_SIZE * 2; i++) {
fn.assignShardKey(UUID.randomUUID().toString(), "one", 2);
}
assertThat(
fn.getCache().size(), equalTo(FlinkAutoBalancedShardKeyShardingFunction.CACHE_MAX_SIZE));
}
@Test
public void testStatefulParDoAfterCombineChaining() {
final JobGraph stablePartitioning = getStatefulParDoAfterCombineChainingJobGraph(true);
final JobGraph unstablePartitioning = getStatefulParDoAfterCombineChainingJobGraph(false);
Assert.assertEquals(
1,
Iterables.size(unstablePartitioning.getVertices())
- Iterables.size(stablePartitioning.getVertices()));
}
private JobGraph getStatefulParDoAfterCombineChainingJobGraph(boolean stablePartitioning) {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
final FlinkStreamingPipelineTranslator translator =
new FlinkStreamingPipelineTranslator(env, PipelineOptionsFactory.create());
final PipelineOptions pipelineOptions = PipelineOptionsFactory.create();
pipelineOptions.setRunner(FlinkRunner.class);
final Pipeline pipeline = Pipeline.create(pipelineOptions);
PCollection<KV<String, Long>> aggregate =
pipeline
.apply(Create.of("foo", "bar").withCoder(StringUtf8Coder.of()))
.apply(Count.perElement());
if (!stablePartitioning) {
aggregate = aggregate.apply(ParDo.of(new StatelessIdentityDoFn<>()));
}
aggregate.apply(ParDo.of(new StatefulNoopDoFn<>()));
translator.translate(pipeline);
return env.getStreamGraph().getJobGraph();
}
@Test
public void testStatefulParDoAfterGroupByKeyChaining() {
final JobGraph stablePartitioning = getStatefulParDoAfterGroupByKeyChainingJobGraph(true);
final JobGraph unstablePartitioning = getStatefulParDoAfterGroupByKeyChainingJobGraph(false);
Assert.assertEquals(
1,
Iterables.size(unstablePartitioning.getVertices())
- Iterables.size(stablePartitioning.getVertices()));
}
private static class StatelessIdentityDoFn<KeyT, ValueT>
extends DoFn<KV<KeyT, ValueT>, KV<KeyT, ValueT>> {
@ProcessElement
public void processElement(ProcessContext ctx) {
ctx.output(ctx.element());
}
}
private static class StatefulNoopDoFn<KeyT, ValueT> extends DoFn<KV<KeyT, ValueT>, Void> {
@TimerId("my-timer")
private final TimerSpec myTimer = TimerSpecs.timer(TimeDomain.EVENT_TIME);
@ProcessElement
public void processElement() {
}
@OnTimer("my-timer")
public void onMyTimer() {
}
}
} | class FlinkStreamingPipelineTranslatorTest {
@Test
public void testAutoBalanceShardKeyResolvesMaxParallelism() {
int parallelism = 3;
assertThat(
new FlinkAutoBalancedShardKeyShardingFunction<>(parallelism, -1, StringUtf8Coder.of())
.getMaxParallelism(),
equalTo(KeyGroupRangeAssignment.computeDefaultMaxParallelism(parallelism)));
assertThat(
new FlinkAutoBalancedShardKeyShardingFunction<>(parallelism, 0, StringUtf8Coder.of())
.getMaxParallelism(),
equalTo(KeyGroupRangeAssignment.computeDefaultMaxParallelism(parallelism)));
}
@Test
public void testAutoBalanceShardKeyCacheIsNotSerialized() throws Exception {
FlinkAutoBalancedShardKeyShardingFunction<String, String> fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(2, 2, StringUtf8Coder.of());
assertNull(fn.getCache());
fn.assignShardKey("target/destination1", "one", 10);
fn.assignShardKey("target/destination2", "two", 10);
assertThat(fn.getCache().size(), equalTo(2));
assertThat(SerializableUtils.clone(fn).getCache(), nullValue());
}
@Test
public void testAutoBalanceShardKeyCacheIsStable() throws Exception {
int numShards = 50;
FlinkAutoBalancedShardKeyShardingFunction<String, String> fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(
numShards / 2, numShards * 2, StringUtf8Coder.of());
List<KV<String, String>> inputs = Lists.newArrayList();
for (int i = 0; i < numShards * 100; i++) {
inputs.add(KV.of("target/destination/1", UUID.randomUUID().toString()));
inputs.add(KV.of("target/destination/2", UUID.randomUUID().toString()));
inputs.add(KV.of("target/destination/3", UUID.randomUUID().toString()));
}
Map<KV<String, Integer>, ShardedKey<Integer>> generatedKeys = new HashMap<>();
for (KV<String, String> input : inputs) {
ShardedKey<Integer> shardKey = fn.assignShardKey(input.getKey(), input.getValue(), numShards);
generatedKeys.put(KV.of(input.getKey(), shardKey.getShardNumber()), shardKey);
}
fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(
numShards / 2, numShards * 2, StringUtf8Coder.of());
Collections.shuffle(inputs);
for (KV<String, String> input : inputs) {
ShardedKey<Integer> shardKey = fn.assignShardKey(input.getKey(), input.getValue(), numShards);
ShardedKey<Integer> expectedShardKey =
generatedKeys.get(KV.of(input.getKey(), shardKey.getShardNumber()));
if (expectedShardKey != null) {
assertThat(shardKey, equalTo(expectedShardKey));
}
}
}
@Test
public void testAutoBalanceShardKeyCacheMaxSize() throws Exception {
FlinkAutoBalancedShardKeyShardingFunction<String, String> fn =
new FlinkAutoBalancedShardKeyShardingFunction<>(2, 2, StringUtf8Coder.of());
for (int i = 0; i < FlinkAutoBalancedShardKeyShardingFunction.CACHE_MAX_SIZE * 2; i++) {
fn.assignShardKey(UUID.randomUUID().toString(), "one", 2);
}
assertThat(
fn.getCache().size(), equalTo(FlinkAutoBalancedShardKeyShardingFunction.CACHE_MAX_SIZE));
}
@Test
public void testStatefulParDoAfterCombineChaining() {
final JobGraph stablePartitioning = getStatefulParDoAfterCombineChainingJobGraph(true);
final JobGraph unstablePartitioning = getStatefulParDoAfterCombineChainingJobGraph(false);
Assert.assertEquals(
1,
Iterables.size(unstablePartitioning.getVertices())
- Iterables.size(stablePartitioning.getVertices()));
}
private JobGraph getStatefulParDoAfterCombineChainingJobGraph(boolean stablePartitioning) {
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
final FlinkStreamingPipelineTranslator translator =
new FlinkStreamingPipelineTranslator(env, PipelineOptionsFactory.create());
final PipelineOptions pipelineOptions = PipelineOptionsFactory.create();
pipelineOptions.setRunner(FlinkRunner.class);
final Pipeline pipeline = Pipeline.create(pipelineOptions);
PCollection<KV<String, Long>> aggregate =
pipeline
.apply(Create.of("foo", "bar").withCoder(StringUtf8Coder.of()))
.apply(Count.perElement());
if (!stablePartitioning) {
aggregate = aggregate.apply(ParDo.of(new StatelessIdentityDoFn<>()));
}
aggregate.apply(ParDo.of(new StatefulNoopDoFn<>()));
translator.translate(pipeline);
return env.getStreamGraph().getJobGraph();
}
@Test
public void testStatefulParDoAfterGroupByKeyChaining() {
final JobGraph stablePartitioning = getStatefulParDoAfterGroupByKeyChainingJobGraph(true);
final JobGraph unstablePartitioning = getStatefulParDoAfterGroupByKeyChainingJobGraph(false);
Assert.assertEquals(
1,
Iterables.size(unstablePartitioning.getVertices())
- Iterables.size(stablePartitioning.getVertices()));
}
private static class StatelessIdentityDoFn<KeyT, ValueT>
extends DoFn<KV<KeyT, ValueT>, KV<KeyT, ValueT>> {
@ProcessElement
public void processElement(ProcessContext ctx) {
ctx.output(ctx.element());
}
}
private static class StatefulNoopDoFn<KeyT, ValueT> extends DoFn<KV<KeyT, ValueT>, Void> {
@TimerId("my-timer")
private final TimerSpec myTimer = TimerSpecs.timer(TimeDomain.EVENT_TIME);
@ProcessElement
public void processElement() {
}
@OnTimer("my-timer")
public void onMyTimer() {
}
}
} |
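
For reference, the max-parallelism fallback exercised by testAutoBalanceShardKeyResolvesMaxParallelism above can be probed in isolation. This is a minimal sketch using Flink's own utility; the class name is made up, the printed value depends on Flink's defaults and is not asserted here.

import org.apache.flink.runtime.state.KeyGroupRangeAssignment;

public class MaxParallelismProbe {
    public static void main(String[] args) {
        // Derive the default max parallelism Flink would pick for an operator parallelism of 3,
        // i.e. the same fallback the sharding function uses when maxParallelism is -1 or 0.
        int derived = KeyGroupRangeAssignment.computeDefaultMaxParallelism(3);
        System.out.println("default max parallelism for p=3: " + derived);
    }
}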
Wouldn't this indicate a critical failure - tracing as warning and ignoring unexpected backend response seems to make it harder than necessary to identify breaking changes / debug it? | public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "#";
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
} | } catch (NumberFormatException ex) { | new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String ownerValue = lease.getOwner();
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
if (ownerValue == null) {
ownerValue = "";
}
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
return Pair.of(ownerValue + "_" + lease.getLeaseToken(), 0);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
estimatedLag = -1;
}
return Pair.of(ownerValue + "_" + lease.getLeaseToken() + "_" + currentLsn + "_" + latestLsn, estimatedLag);
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
final String pkRangeIdSeparator = ":";
final String segmentSeparator = "#";
final String lsnPropertyName = "_lsn";
final String tsPropertyName = "_ts";
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(pkRangeIdSeparator));
String[] segments = parsedSessionToken.split(segmentSeparator);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setId(lease.getId())
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken())
.setLastUpdate(ZonedDateTime.parse(lease.getTimestamp()).toInstant());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0);
changeFeedProcessorState.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(lsnPropertyName).asText(null));
try {
changeFeedProcessorState.setContinuationTokenTimestamp(Instant.ofEpochMilli(Long.valueOf(
feedResponse.getResults().get(0).get(tsPropertyName).asText("0"))));
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos _ts found", ex);
changeFeedProcessorState.setContinuationTokenTimestamp(null);
}
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(lsnPropertyName).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthy situations.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} | class ChangeFeedProcessorBuilderImpl implements ChangeFeedProcessor, AutoCloseable {
private static final String PK_RANGE_ID_SEPARATOR = ":";
private static final String SEGMENT_SEPARATOR = "#";
private static final String PROPERTY_NAME_LSN = "_lsn";
private static final String PROPERTY_NAME_TS = "_ts";
private final Logger logger = LoggerFactory.getLogger(ChangeFeedProcessorBuilderImpl.class);
private static final long DefaultUnhealthinessDuration = Duration.ofMinutes(15).toMillis();
private final Duration sleepTime = Duration.ofSeconds(15);
private final Duration lockTime = Duration.ofSeconds(30);
private static final int DefaultQueryPartitionsMaxBatchSize = 100;
private int queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
private int degreeOfParallelism = 25;
private String hostName;
private ChangeFeedContextClient feedContextClient;
private ChangeFeedProcessorOptions changeFeedProcessorOptions;
private ChangeFeedObserverFactory observerFactory;
private volatile String databaseResourceId;
private volatile String collectionResourceId;
private ChangeFeedContextClient leaseContextClient;
private PartitionLoadBalancingStrategy loadBalancingStrategy;
private PartitionProcessorFactory partitionProcessorFactory;
private LeaseStoreManager leaseStoreManager;
private HealthMonitor healthMonitor;
private volatile PartitionManager partitionManager;
private Scheduler scheduler;
/**
* Start listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> start() {
if (this.partitionManager == null) {
return this.initializeCollectionPropertiesForBuild()
.flatMap( value -> this.getLeaseStoreManager()
.flatMap(leaseStoreManager -> this.buildPartitionManager(leaseStoreManager)))
.flatMap(partitionManager1 -> {
this.partitionManager = partitionManager1;
return this.partitionManager.start();
});
} else {
return partitionManager.start();
}
}
/**
* Stops listening for changes asynchronously.
*
* @return a representation of the deferred computation of this call.
*/
@Override
public Mono<Void> stop() {
if (this.partitionManager == null || !this.partitionManager.isRunning()) {
throw new IllegalStateException("The ChangeFeedProcessor instance has not fully started");
}
return this.partitionManager.stop();
}
/**
* Returns the state of the change feed processor.
*
* @return true if the change feed processor is currently active and running.
*/
@Override
public boolean isStarted() {
return this.partitionManager != null && this.partitionManager.isRunning();
}
/**
* Returns the current owner (host) and an approximation of the difference between the last processed item (defined
* by the state of the feed container) and the latest change in the container for each partition (lease
* document).
* <p>
* An empty map will be returned if the processor was not started or no lease documents matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a map representing the current owner and lease token, the current LSN and latest LSN, and the estimated
* lag, asynchronously.
*/
@Override
public Mono<Map<String, Integer>> getEstimatedLag() {
Map<String, Integer> earlyResult = new ConcurrentHashMap<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(earlyResult);
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = );
})
.collectList()
.map(valueList -> {
Map<String, Integer> result = new ConcurrentHashMap<>();
for (Pair<String, Integer> pair : valueList) {
result.put(pair.getKey(), pair.getValue());
}
return result;
});
}
/**
* Returns a list of states each representing one scoped worker item.
* <p>
* An empty list will be returned if the processor was not started or no lease items matching the current
* {@link ChangeFeedProcessor} instance's lease prefix could be found.
*
* @return a list of states each representing one scoped worker item.
*/
@Override
public Mono<List<ChangeFeedProcessorState>> getCurrentState() {
List<ChangeFeedProcessorState> earlyResult = new ArrayList<>();
if (this.leaseStoreManager == null || this.feedContextClient == null) {
return Mono.just(Collections.unmodifiableList(earlyResult));
}
return this.leaseStoreManager.getAllLeases()
.flatMap(lease -> {
ChangeFeedOptions options = new ChangeFeedOptions()
.setMaxItemCount(1)
.setPartitionKeyRangeId(lease.getLeaseToken())
.setStartFromBeginning(true)
.setRequestContinuation(lease.getContinuationToken());
return this.feedContextClient.createDocumentChangeFeedQuery(this.feedContextClient.getContainerClient(), options)
.take(1)
.map(feedResponse -> {
String sessionTokenLsn = feedResponse.getSessionToken();
String parsedSessionToken = sessionTokenLsn.substring(sessionTokenLsn.indexOf(PK_RANGE_ID_SEPARATOR));
String[] segments = StringUtils.split(parsedSessionToken, SEGMENT_SEPARATOR);
String latestLsn = segments[0];
if (segments.length >= 2) {
latestLsn = segments[1];
}
ChangeFeedProcessorState changeFeedProcessorState = new ChangeFeedProcessorState()
.setHostName(lease.getOwner())
.setLeaseToken(lease.getLeaseToken());
if (feedResponse.getResults() == null || feedResponse.getResults().size() == 0) {
changeFeedProcessorState.setEstimatedLag(0)
.setContinuationToken(latestLsn);
return changeFeedProcessorState;
}
changeFeedProcessorState.setContinuationToken(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText(null));
Integer currentLsn = 0;
Integer estimatedLag = 0;
try {
currentLsn = Integer.valueOf(feedResponse.getResults().get(0).get(PROPERTY_NAME_LSN).asText("0"));
estimatedLag = Integer.valueOf(latestLsn);
estimatedLag = estimatedLag - currentLsn + 1;
changeFeedProcessorState.setEstimatedLag(estimatedLag);
} catch (NumberFormatException ex) {
logger.warn("Unexpected Cosmos LSN found", ex);
changeFeedProcessorState.setEstimatedLag(-1);
}
return changeFeedProcessorState;
});
})
.collectList()
.map(Collections::unmodifiableList);
}
/**
* Sets the host name.
*
* @param hostName the name to be used for the host. When using multiple hosts, each host must have a unique name.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl hostName(String hostName) {
this.hostName = hostName;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the monitored collection.
*
* @param feedDocumentClient the instance of {@link CosmosAsyncContainer} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl feedContainer(CosmosAsyncContainer feedDocumentClient) {
if (feedDocumentClient == null) {
throw new IllegalArgumentException("feedContextClient");
}
this.feedContextClient = new ChangeFeedContextClientImpl(feedDocumentClient);
return this;
}
/**
* Sets the {@link ChangeFeedProcessorOptions} to be used.
*
* @param changeFeedProcessorOptions the change feed processor options to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl options(ChangeFeedProcessorOptions changeFeedProcessorOptions) {
if (changeFeedProcessorOptions == null) {
throw new IllegalArgumentException("changeFeedProcessorOptions");
}
this.changeFeedProcessorOptions = changeFeedProcessorOptions;
return this;
}
/**
* Sets the {@link ChangeFeedObserverFactory} to be used to generate {@link ChangeFeedObserver}
*
* @param observerFactory The instance of {@link ChangeFeedObserverFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observerFactory(ChangeFeedObserverFactory observerFactory) {
if (observerFactory == null) {
throw new IllegalArgumentException("observerFactory");
}
this.observerFactory = observerFactory;
return this;
}
/**
* Sets an existing {@link ChangeFeedObserver} type to be used by a {@link ChangeFeedObserverFactory} to process changes.
* @param type the type of {@link ChangeFeedObserver} to be used.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl observer(Class<? extends ChangeFeedObserver> type) {
if (type == null) {
throw new IllegalArgumentException("type");
}
this.observerFactory = new ChangeFeedObserverFactoryImpl(type);
return this;
}
public ChangeFeedProcessorBuilderImpl handleChanges(Consumer<List<JsonNode>> consumer) {
return this.observerFactory(new DefaultObserverFactory(consumer));
}
/**
* Sets the database resource ID of the monitored collection.
*
* @param databaseResourceId the database resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withDatabaseResourceId(String databaseResourceId) {
this.databaseResourceId = databaseResourceId;
return this;
}
/**
* Sets the collection resource ID of the monitored collection.
* @param collectionResourceId the collection resource ID of the monitored collection.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withCollectionResourceId(String collectionResourceId) {
this.collectionResourceId = collectionResourceId;
return this;
}
/**
* Sets an existing {@link CosmosAsyncContainer} to be used to read from the leases collection.
*
* @param leaseClient the instance of {@link CosmosAsyncContainer} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl leaseContainer(CosmosAsyncContainer leaseClient) {
if (leaseClient == null) {
throw new IllegalArgumentException("leaseClient");
}
if (!getContextClient(leaseClient).isContentResponseOnWriteEnabled()) {
throw new IllegalArgumentException("leaseClient: content response on write setting must be enabled");
}
ConsistencyLevel consistencyLevel = getContextClient(leaseClient).getConsistencyLevel();
if (consistencyLevel == ConsistencyLevel.CONSISTENT_PREFIX || consistencyLevel == ConsistencyLevel.EVENTUAL) {
logger.warn("leaseClient consistency level setting are less then expected which is SESSION");
}
this.leaseContextClient = new ChangeFeedContextClientImpl(leaseClient);
return this;
}
/**
* Sets the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
*
* @param loadBalancingStrategy the {@link PartitionLoadBalancingStrategy} to be used for partition load balancing.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionLoadBalancingStrategy(PartitionLoadBalancingStrategy loadBalancingStrategy) {
if (loadBalancingStrategy == null) {
throw new IllegalArgumentException("loadBalancingStrategy");
}
this.loadBalancingStrategy = loadBalancingStrategy;
return this;
}
/**
* Sets the {@link PartitionProcessorFactory} to be used to create {@link PartitionProcessor} for partition processing.
*
* @param partitionProcessorFactory the instance of {@link PartitionProcessorFactory} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withPartitionProcessorFactory(PartitionProcessorFactory partitionProcessorFactory) {
if (partitionProcessorFactory == null) {
throw new IllegalArgumentException("partitionProcessorFactory");
}
this.partitionProcessorFactory = partitionProcessorFactory;
return this;
}
/**
* Sets the {@link LeaseStoreManager} to be used to manage leases.
*
* @param leaseStoreManager the instance of {@link LeaseStoreManager} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withLeaseStoreManager(LeaseStoreManager leaseStoreManager) {
if (leaseStoreManager == null) {
throw new IllegalArgumentException("leaseStoreManager");
}
this.leaseStoreManager = leaseStoreManager;
return this;
}
/**
* Sets the {@link HealthMonitor} to be used to monitor unhealthy situations.
*
* @param healthMonitor The instance of {@link HealthMonitor} to use.
* @return current Builder.
*/
public ChangeFeedProcessorBuilderImpl withHealthMonitor(HealthMonitor healthMonitor) {
if (healthMonitor == null) {
throw new IllegalArgumentException("healthMonitor");
}
this.healthMonitor = healthMonitor;
return this;
}
/**
* Builds a new instance of the {@link ChangeFeedProcessor} with the specified configuration asynchronously.
*
* @return an instance of {@link ChangeFeedProcessor}.
*/
public ChangeFeedProcessor build() {
if (this.hostName == null) {
throw new IllegalArgumentException("Host name was not specified");
}
if (this.observerFactory == null) {
throw new IllegalArgumentException("Observer was not specified");
}
if (this.changeFeedProcessorOptions != null && this.changeFeedProcessorOptions.getLeaseAcquireInterval().compareTo(ChangeFeedProcessorOptions.DEFAULT_ACQUIRE_INTERVAL) < 0) {
logger.warn("Found lower than expected setting for leaseAcquireInterval");
}
if (this.scheduler == null) {
this.scheduler = Schedulers.elastic();
}
return this;
}
public ChangeFeedProcessorBuilderImpl() {
this.queryPartitionsMaxBatchSize = DefaultQueryPartitionsMaxBatchSize;
this.degreeOfParallelism = 25;
}
public ChangeFeedProcessorBuilderImpl(PartitionManager partitionManager) {
this.partitionManager = partitionManager;
}
private Mono<ChangeFeedProcessor> initializeCollectionPropertiesForBuild() {
if (this.changeFeedProcessorOptions == null) {
this.changeFeedProcessorOptions = new ChangeFeedProcessorOptions();
}
return this.feedContextClient
.readDatabase(this.feedContextClient.getDatabaseClient(), null)
.map( databaseResourceResponse -> {
this.databaseResourceId = databaseResourceResponse.getProperties().getId();
return this.databaseResourceId;
})
.flatMap( id -> this.feedContextClient
.readContainer(this.feedContextClient.getContainerClient(), null)
.map(documentCollectionResourceResponse -> {
this.collectionResourceId = documentCollectionResourceResponse.getProperties().getId();
return this;
}));
}
private Mono<LeaseStoreManager> getLeaseStoreManager() {
if (this.leaseStoreManager == null) {
return this.leaseContextClient.readContainerSettings(this.leaseContextClient.getContainerClient(), null)
.flatMap( collectionSettings -> {
boolean isPartitioned =
collectionSettings.getPartitionKeyDefinition() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths() != null &&
collectionSettings.getPartitionKeyDefinition().getPaths().size() > 0;
if (!isPartitioned || (collectionSettings.getPartitionKeyDefinition().getPaths().size() != 1 || !collectionSettings.getPartitionKeyDefinition().getPaths().get(0).equals("/id"))) {
return Mono.error(new IllegalArgumentException("The lease collection must have partition key equal to id."));
}
RequestOptionsFactory requestOptionsFactory = new PartitionedByIdCollectionRequestOptionsFactory();
String leasePrefix = this.getLeasePrefix();
return LeaseStoreManager.builder()
.leasePrefix(leasePrefix)
.leaseCollectionLink(this.leaseContextClient.getContainerClient())
.leaseContextClient(this.leaseContextClient)
.requestOptionsFactory(requestOptionsFactory)
.hostName(this.hostName)
.build()
.map(manager -> {
this.leaseStoreManager = manager;
return this.leaseStoreManager;
});
});
}
return Mono.just(this.leaseStoreManager);
}
private String getLeasePrefix() {
String optionsPrefix = this.changeFeedProcessorOptions.getLeasePrefix();
if (optionsPrefix == null) {
optionsPrefix = "";
}
URI uri = this.feedContextClient.getServiceEndpoint();
return String.format(
"%s%s_%s_%s",
optionsPrefix,
uri.getHost(),
this.databaseResourceId,
this.collectionResourceId);
}
private Mono<PartitionManager> buildPartitionManager(LeaseStoreManager leaseStoreManager) {
CheckpointerObserverFactory factory = new CheckpointerObserverFactory(this.observerFactory, new CheckpointFrequency());
PartitionSynchronizerImpl synchronizer = new PartitionSynchronizerImpl(
this.feedContextClient,
this.feedContextClient.getContainerClient(),
leaseStoreManager,
leaseStoreManager,
this.degreeOfParallelism,
this.queryPartitionsMaxBatchSize
);
Bootstrapper bootstrapper = new BootstrapperImpl(synchronizer, leaseStoreManager, this.lockTime, this.sleepTime);
PartitionSupervisorFactory partitionSupervisorFactory = new PartitionSupervisorFactoryImpl(
factory,
leaseStoreManager,
this.partitionProcessorFactory != null ? this.partitionProcessorFactory : new PartitionProcessorFactoryImpl(
this.feedContextClient,
this.changeFeedProcessorOptions,
leaseStoreManager,
this.feedContextClient.getContainerClient()),
this.changeFeedProcessorOptions,
this.scheduler
);
if (this.loadBalancingStrategy == null) {
this.loadBalancingStrategy = new EqualPartitionsBalancingStrategy(
this.hostName,
this.changeFeedProcessorOptions.getMinScaleCount(),
this.changeFeedProcessorOptions.getMaxScaleCount(),
this.changeFeedProcessorOptions.getLeaseExpirationInterval());
}
PartitionController partitionController = new PartitionControllerImpl(leaseStoreManager, leaseStoreManager, partitionSupervisorFactory, synchronizer, scheduler);
if (this.healthMonitor == null) {
this.healthMonitor = new TraceHealthMonitor();
}
PartitionController partitionController2 = new HealthMonitoringPartitionControllerDecorator(partitionController, this.healthMonitor);
PartitionLoadBalancer partitionLoadBalancer = new PartitionLoadBalancerImpl(
partitionController2,
leaseStoreManager,
this.loadBalancingStrategy,
this.changeFeedProcessorOptions.getLeaseAcquireInterval(),
this.scheduler
);
PartitionManager partitionManager = new PartitionManagerImpl(bootstrapper, partitionController, partitionLoadBalancer);
return Mono.just(partitionManager);
}
@Override
public void close() {
this.stop().subscribeOn(Schedulers.elastic()).subscribe();
}
} |
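
The lag estimation above subtracts the last processed LSN from the latest LSN carried in the session token. A standalone sketch of that arithmetic follows; the token format ("<pkRangeId>:<version>#<globalLsn>"), the sample values, and the class/method names are assumptions for illustration, not SDK API.

public final class LagEstimationSketch {

    private LagEstimationSketch() {
    }

    // Mirrors the parsing above: take everything after the pk-range id, split on "#",
    // and use the second segment (when a version segment is present) as the latest LSN.
    static int estimateLag(String sessionToken, int currentLsn) {
        String afterRangeId = sessionToken.substring(sessionToken.indexOf(":"));
        String[] segments = afterRangeId.split("#");
        String latestLsn = segments.length >= 2 ? segments[1] : segments[0];
        return Integer.parseInt(latestLsn) - currentLsn + 1;
    }

    public static void main(String[] args) {
        // Hypothetical token for pk range "0" with global LSN 105; last processed change had LSN 100.
        System.out.println(estimateLag("0:-1#105", 100)); // prints 6
    }
}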
Actually, we require Jandex 2.1+ and re-index dependencies if necessary (https://github.com/quarkusio/quarkus/blob/master/core/deployment/src/main/java/io/quarkus/deployment/index/ApplicationArchiveBuildStep.java#L200-L202). So `hasNoArgsConstructor()` should be enough. I'll update the PR. | public void addMissingConstructors() throws Exception {
Set<ClassInfo> targetClasses = new HashSet<>();
Set<DotName> normalScopes = initNormalScopes();
for (DotName normalScope : normalScopes) {
collectTargetClasses(targetClasses, normalScope);
}
for (Iterator<ClassInfo> iterator = targetClasses.iterator(); iterator.hasNext();) {
ClassInfo targetClass = iterator.next();
if (targetClass.hasNoArgsConstructor()
|| targetClass.methods().stream().anyMatch(m -> m.name().equals("<init>") && m.parameters().isEmpty())) {
iterator.remove();
}
}
if (targetClasses.isEmpty()) {
return;
}
Set<DotName> transformedClasses = new HashSet<>();
for (ClassInfo targetClass : targetClasses) {
String superClassName;
if (targetClass.superName() == null) {
superClassName = "java/lang/Object";
} else {
ClassInfo superClass = combinedIndex.getIndex().getClassByName(targetClass.superName());
if (superClass != null) {
if (superClass.hasNoArgsConstructor() || targetClass.methods().stream()
.anyMatch(m -> m.name().equals("<init>") && m.parameters().isEmpty())) {
superClassName = superClass.name().toString().replace('.', '/');
} else {
superClassName = null;
}
} else {
superClassName = null;
}
}
if (superClassName != null) {
transformedClasses.add(targetClass.name());
LOGGER.debugf("Adding no-args constructor to %s", targetClass);
transformers.produce(new BytecodeTransformerBuildItem(targetClass.name().toString(),
new BiFunction<String, ClassVisitor, ClassVisitor>() {
@Override
public ClassVisitor apply(String className, ClassVisitor classVisitor) {
ClassVisitor cv = new ClassVisitor(Opcodes.ASM6, classVisitor) {
@Override
public void visit(int version, int access, String name, String signature, String superName,
String[] interfaces) {
super.visit(version, access, name, signature, superName, interfaces);
MethodVisitor mv = visitMethod(Modifier.PUBLIC, "<init>", "()V", null,
null);
mv.visitCode();
mv.visitVarInsn(Opcodes.ALOAD, 0);
mv.visitMethodInsn(Opcodes.INVOKESPECIAL, superClassName, "<init>", "()V", false);
mv.visitInsn(Opcodes.RETURN);
mv.visitMaxs(1, 1);
mv.visitEnd();
}
};
return cv;
}
}));
}
}
if (!transformedClasses.isEmpty()) {
validators.produce(new BeanDeploymentValidatorBuildItem(new BeanDeploymentValidator() {
@Override
public boolean skipValidation(InjectionTargetInfo target, ValidationRule rule) {
return ValidationRule.NO_ARGS_CONSTRUCTOR.equals(rule) && target.kind() == TargetKind.BEAN
&& transformedClasses.contains(target.asBean().getBeanClass());
}
}));
}
} | || targetClass.methods().stream().anyMatch(m -> m.name().equals("<init>") && m.parameters().isEmpty())) { | public void addMissingConstructors() throws Exception {
Set<ClassInfo> targetClasses = new HashSet<>();
Set<DotName> normalScopes = initNormalScopes();
for (DotName normalScope : normalScopes) {
collectTargetClasses(targetClasses, normalScope);
}
for (Iterator<ClassInfo> iterator = targetClasses.iterator(); iterator.hasNext();) {
ClassInfo targetClass = iterator.next();
if (targetClass.hasNoArgsConstructor()) {
iterator.remove();
}
}
if (targetClasses.isEmpty()) {
return;
}
Set<DotName> transformedClasses = new HashSet<>();
for (ClassInfo targetClass : targetClasses) {
String superClassName;
if (targetClass.superName() == null) {
superClassName = "java/lang/Object";
} else {
ClassInfo superClass = combinedIndex.getIndex().getClassByName(targetClass.superName());
if (superClass != null && superClass.hasNoArgsConstructor()) {
superClassName = superClass.name().toString().replace('.', '/');
} else {
superClassName = null;
}
}
if (superClassName != null) {
transformedClasses.add(targetClass.name());
LOGGER.debugf("Adding no-args constructor to %s", targetClass);
transformers.produce(new BytecodeTransformerBuildItem(targetClass.name().toString(),
new BiFunction<String, ClassVisitor, ClassVisitor>() {
@Override
public ClassVisitor apply(String className, ClassVisitor classVisitor) {
ClassVisitor cv = new ClassVisitor(Opcodes.ASM6, classVisitor) {
@Override
public void visit(int version, int access, String name, String signature, String superName,
String[] interfaces) {
super.visit(version, access, name, signature, superName, interfaces);
MethodVisitor mv = visitMethod(Modifier.PUBLIC, "<init>", "()V", null,
null);
mv.visitCode();
mv.visitVarInsn(Opcodes.ALOAD, 0);
mv.visitMethodInsn(Opcodes.INVOKESPECIAL, superClassName, "<init>", "()V", false);
mv.visitInsn(Opcodes.RETURN);
mv.visitMaxs(1, 1);
mv.visitEnd();
}
};
return cv;
}
}));
}
}
if (!transformedClasses.isEmpty()) {
validators.produce(new BeanDeploymentValidatorBuildItem(new BeanDeploymentValidator() {
@Override
public boolean skipValidation(InjectionTargetInfo target, ValidationRule rule) {
return ValidationRule.NO_ARGS_CONSTRUCTOR.equals(rule) && target.kind() == TargetKind.BEAN
&& transformedClasses.contains(target.asBean().getBeanClass());
}
}));
}
} | class NoArgsConstructorProcessor {
private static final Logger LOGGER = Logger.getLogger(NoArgsConstructorProcessor.class);
private static final int ANNOTATION = 0x00002000;
@Inject
BeanArchiveIndexBuildItem beanArchiveIndex;
@Inject
CombinedIndexBuildItem combinedIndex;
@Inject
BuildProducer<BytecodeTransformerBuildItem> transformers;
@Inject
BuildProducer<BeanDeploymentValidatorBuildItem> validators;
@BuildStep
private Set<DotName> initNormalScopes() {
Set<DotName> normalScopes = new HashSet<>();
normalScopes.add(BuiltinScope.APPLICATION.getName());
normalScopes.add(BuiltinScope.REQUEST.getName());
combinedIndex.getIndex().getAnnotations(DotName.createSimple(NormalScope.class.getName())).stream()
.filter(a -> a.target().kind() == Kind.CLASS && isAnnotation(a.target().asClass())).map(a -> a.name())
.forEach(normalScopes::add);
return normalScopes;
}
private void collectTargetClasses(Set<ClassInfo> targetClasses, DotName normalScope) {
for (AnnotationInstance annotationInstance : beanArchiveIndex.getIndex()
.getAnnotations(normalScope)) {
if (annotationInstance.target().kind() == Kind.CLASS) {
targetClasses.add(annotationInstance.target().asClass());
}
}
}
private boolean isAnnotation(ClassInfo classInfo) {
return (classInfo.flags() & ANNOTATION) != 0;
}
} | class NoArgsConstructorProcessor {
private static final Logger LOGGER = Logger.getLogger(NoArgsConstructorProcessor.class);
private static final int ANNOTATION = 0x00002000;
@Inject
BeanArchiveIndexBuildItem beanArchiveIndex;
@Inject
CombinedIndexBuildItem combinedIndex;
@Inject
BuildProducer<BytecodeTransformerBuildItem> transformers;
@Inject
BuildProducer<BeanDeploymentValidatorBuildItem> validators;
@BuildStep
private Set<DotName> initNormalScopes() {
Set<DotName> normalScopes = new HashSet<>();
normalScopes.add(BuiltinScope.APPLICATION.getName());
normalScopes.add(BuiltinScope.REQUEST.getName());
combinedIndex.getIndex().getAnnotations(DotName.createSimple(NormalScope.class.getName())).stream()
.filter(NoArgsConstructorProcessor::isTargetAnnotation)
.map(AnnotationInstance::name)
.forEach(normalScopes::add);
return normalScopes;
}
private void collectTargetClasses(Set<ClassInfo> targetClasses, DotName normalScope) {
for (AnnotationInstance annotationInstance : beanArchiveIndex.getIndex()
.getAnnotations(normalScope)) {
if (annotationInstance.target().kind() == Kind.CLASS) {
targetClasses.add(annotationInstance.target().asClass());
}
}
}
private static boolean isTargetAnnotation(AnnotationInstance annotationInstance) {
return annotationInstance.target().kind() == Kind.CLASS
&& ((annotationInstance.target().asClass().flags() & ANNOTATION) != 0);
}
} |
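
The review note above relies on Jandex's ClassInfo.hasNoArgsConstructor(), which is dependable once the archive has been indexed with Jandex 2.1+. A small illustrative check follows; GreetingBean is a made-up class, while the Indexer/Index/ClassInfo/DotName calls are the standard Jandex API.

import java.io.IOException;
import java.io.InputStream;

import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.DotName;
import org.jboss.jandex.Index;
import org.jboss.jandex.Indexer;

public class JandexConstructorCheck {

    // Made-up bean class: it only declares a constructor that takes arguments.
    public static class GreetingBean {
        public GreetingBean(String name) {
        }
    }

    public static void main(String[] args) throws IOException {
        Indexer indexer = new Indexer();
        String resource = GreetingBean.class.getName().replace('.', '/') + ".class";
        try (InputStream in = JandexConstructorCheck.class.getClassLoader().getResourceAsStream(resource)) {
            indexer.index(in);
        }
        Index index = indexer.complete();
        ClassInfo info = index.getClassByName(DotName.createSimple(GreetingBean.class.getName()));
        // With Jandex 2.1+ this single check decides whether a no-args constructor must be generated.
        System.out.println("has no-args constructor: " + info.hasNoArgsConstructor()); // false
    }
}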
I can't repeat the same thing again. Read this comment. https://github.com/ballerina-platform/ballerina-lang/pull/10157#discussion_r212197848 | public void execute(Context context) {
BMap<String, BValue> socketStruct;
try {
socketStruct = (BMap<String, BValue>) context.getRefArgument(0);
int port = (int) context.getIntArgument(0);
BValue networkInterface = context.getNullableRefArgument(1);
SocketChannel socket = (SocketChannel) socketStruct.getNativeData(SocketConstants.SOCKET_KEY);
if (networkInterface == null) {
socket.bind(new InetSocketAddress(port));
} else {
socket.bind(new InetSocketAddress(networkInterface.stringValue(), port));
}
} catch (IOException e) {
String message = "Error occurred while bind the socket address: " + e.getMessage();
log.error(message, e);
context.setReturnValues(IOUtils.createError(context, message));
}
} | String message = "Error occurred while bind the socket address: " + e.getMessage(); | public void execute(Context context) {
BMap<String, BValue> socketStruct;
int port = (int) context.getIntArgument(0);
try {
socketStruct = (BMap<String, BValue>) context.getRefArgument(0);
BValue networkInterface = context.getNullableRefArgument(1);
SocketChannel socket = (SocketChannel) socketStruct.getNativeData(SocketConstants.SOCKET_KEY);
if (networkInterface == null) {
socket.bind(new InetSocketAddress(port));
} else {
socket.bind(new InetSocketAddress(networkInterface.stringValue(), port));
}
} catch (ConnectionPendingException e) {
String message = "Socket initialization already in process. Unable to bind to the port.";
context.setReturnValues(IOUtils.createError(context, message));
} catch (AlreadyBoundException e) {
String message = "Unable to bind to the port: " + port + ". Socket is already bound.";
context.setReturnValues(IOUtils.createError(context, message));
} catch (UnsupportedAddressTypeException e) {
String message = "Socket address doesn't support for a TCP connection.";
context.setReturnValues(IOUtils.createError(context, message));
} catch (ClosedChannelException e) {
String message = "Socket connection is already closed.";
context.setReturnValues(IOUtils.createError(context, message));
} catch (IOException e) {
String message = "Error occurred while bind to the socket address: " + e.getMessage();
log.error(message, e);
context.setReturnValues(IOUtils.createError(context, message));
} catch (SecurityException e) {
String message = "Unknown error occurred.";
log.error(message, e);
context.setReturnValues(IOUtils.createError(context, message));
} catch (Throwable e) {
String message = "An error occurred.";
log.error(message, e);
context.setReturnValues(IOUtils.createError(context, message));
}
} | class BindAddress extends BlockingNativeCallableUnit {
private static final Logger log = LoggerFactory.getLogger(BindAddress.class);
@Override
} | class BindAddress extends BlockingNativeCallableUnit {
private static final Logger log = LoggerFactory.getLogger(BindAddress.class);
@Override
} |
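
The revised bind logic above maps each NIO failure mode to a distinct error. The same pattern in plain Java, outside the Ballerina runtime, looks roughly like this; the loopback address and ephemeral port are arbitrary choices for the sketch.

import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.channels.AlreadyBoundException;
import java.nio.channels.SocketChannel;
import java.nio.channels.UnsupportedAddressTypeException;

public class BindExample {
    public static void main(String[] args) {
        try (SocketChannel channel = SocketChannel.open()) {
            // Bind to an ephemeral port on the loopback interface.
            channel.bind(new InetSocketAddress("127.0.0.1", 0));
            System.out.println("bound to " + channel.getLocalAddress());
        } catch (AlreadyBoundException e) {
            System.err.println("socket is already bound");
        } catch (UnsupportedAddressTypeException e) {
            System.err.println("address type is not supported for a TCP connection");
        } catch (IOException e) {
            System.err.println("bind failed: " + e.getMessage());
        }
    }
}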
Add some descriptions here to explain what the contents of each step in active buffer are? | void testBasicRun() {
TestAsyncExecutionController<String, String> aec =
new TestAsyncExecutionController<>(
new SyncMailboxExecutor(), new TestStateExecutor());
TestUnderlyingState underlyingState = new TestUnderlyingState();
TestValueState valueState = new TestValueState(aec, underlyingState);
AtomicInteger output = new AtomicInteger();
Runnable userCode =
() -> {
valueState
.asyncValue()
.thenCompose(
val -> {
int updated = (val == null ? 1 : (val + 1));
return valueState
.asyncUpdate(updated)
.thenCompose(
o ->
StateFutureUtils.completedFuture(
updated));
})
.thenAccept(val -> output.set(val));
};
String record1 = "key1-r1";
String key1 = "key1";
RecordContext<String, String> recordContext1 = aec.buildContext(record1, key1);
aec.setCurrentContext(recordContext1);
userCode.run();
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(0);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(0);
assertThat(output.get()).isEqualTo(1);
assertThat(recordContext1.getReferenceCount()).isEqualTo(0);
String record2 = "key1-r2";
String key2 = "key1";
RecordContext<String, String> recordContext2 = aec.buildContext(record2, key2);
aec.setCurrentContext(recordContext2);
userCode.run();
String record3 = "key1-r3";
String key3 = "key1";
RecordContext<String, String> recordContext3 = aec.buildContext(record3, key3);
aec.setCurrentContext(recordContext3);
userCode.run();
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.blockingBuffer.size()).isEqualTo(1);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.blockingBuffer.size()).isEqualTo(1);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(0);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(0);
assertThat(output.get()).isEqualTo(2);
assertThat(recordContext2.getReferenceCount()).isEqualTo(0);
assertThat(aec.blockingBuffer.size()).isEqualTo(1);
aec.migrateBlockingToActive();
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.blockingBuffer.size()).isEqualTo(0);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(0);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(0);
assertThat(output.get()).isEqualTo(3);
assertThat(recordContext3.getReferenceCount()).isEqualTo(0);
String record4 = "key3-r3";
String key4 = "key3";
RecordContext<String, String> recordContext4 = aec.buildContext(record4, key4);
aec.setCurrentContext(recordContext4);
userCode.run();
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(0);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(0);
assertThat(output.get()).isEqualTo(1);
assertThat(recordContext4.getReferenceCount()).isEqualTo(0);
} | assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1); | void testBasicRun() {
TestAsyncExecutionController<String, String> aec =
new TestAsyncExecutionController<>(
new SyncMailboxExecutor(), new TestStateExecutor());
TestUnderlyingState underlyingState = new TestUnderlyingState();
TestValueState valueState = new TestValueState(aec, underlyingState);
AtomicInteger output = new AtomicInteger();
Runnable userCode =
() -> {
valueState
.asyncValue()
.thenCompose(
val -> {
int updated = (val == null ? 1 : (val + 1));
return valueState
.asyncUpdate(updated)
.thenCompose(
o ->
StateFutureUtils.completedFuture(
updated));
})
.thenAccept(val -> output.set(val));
};
String record1 = "key1-r1";
String key1 = "key1";
RecordContext<String, String> recordContext1 = aec.buildContext(record1, key1);
aec.setCurrentContext(recordContext1);
userCode.run();
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(0);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(0);
assertThat(output.get()).isEqualTo(1);
assertThat(recordContext1.getReferenceCount()).isEqualTo(0);
String record2 = "key1-r2";
String key2 = "key1";
RecordContext<String, String> recordContext2 = aec.buildContext(record2, key2);
aec.setCurrentContext(recordContext2);
userCode.run();
String record3 = "key1-r3";
String key3 = "key1";
RecordContext<String, String> recordContext3 = aec.buildContext(record3, key3);
aec.setCurrentContext(recordContext3);
userCode.run();
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.blockingBuffer.size()).isEqualTo(1);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.blockingBuffer.size()).isEqualTo(1);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(0);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(0);
assertThat(output.get()).isEqualTo(2);
assertThat(recordContext2.getReferenceCount()).isEqualTo(0);
assertThat(aec.blockingBuffer.size()).isEqualTo(1);
aec.migrateBlockingToActive();
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.blockingBuffer.size()).isEqualTo(0);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(0);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(0);
assertThat(output.get()).isEqualTo(3);
assertThat(recordContext3.getReferenceCount()).isEqualTo(0);
String record4 = "key3-r3";
String key4 = "key3";
RecordContext<String, String> recordContext4 = aec.buildContext(record4, key4);
aec.setCurrentContext(recordContext4);
userCode.run();
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(1);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(1);
aec.triggerIfNeeded(true);
assertThat(aec.activeBuffer.size()).isEqualTo(0);
assertThat(aec.keyAccountingUnit.occupiedCount()).isEqualTo(0);
assertThat(output.get()).isEqualTo(1);
assertThat(recordContext4.getReferenceCount()).isEqualTo(0);
} | class AsyncExecutionControllerTest {
@Test
/**
* An AsyncExecutionController for testing purposes, which integrates with a basic buffer
* mechanism.
*/
static class TestAsyncExecutionController<R, K> extends AsyncExecutionController<R, K> {
LinkedList<StateRequest<K, ?, ?>> activeBuffer;
LinkedList<StateRequest<K, ?, ?>> blockingBuffer;
public TestAsyncExecutionController(
MailboxExecutor mailboxExecutor, StateExecutor stateExecutor) {
super(mailboxExecutor, stateExecutor);
activeBuffer = new LinkedList<>();
blockingBuffer = new LinkedList<>();
}
@Override
<IN, OUT> void insertActiveBuffer(StateRequest<K, IN, OUT> request) {
activeBuffer.push(request);
}
<IN, OUT> void insertBlockingBuffer(StateRequest<K, IN, OUT> request) {
blockingBuffer.push(request);
}
void triggerIfNeeded(boolean force) {
if (!force) {
return;
}
LinkedList<StateRequest<?, ?, ?>> toRun = new LinkedList<>(activeBuffer);
activeBuffer.clear();
stateExecutor.executeBatchRequests(toRun);
}
void migrateBlockingToActive() {
Iterator<StateRequest<K, ?, ?>> blockingIter = blockingBuffer.iterator();
while (blockingIter.hasNext()) {
StateRequest<K, ?, ?> request = blockingIter.next();
if (request.getRecordContext().tryOccupyKey()) {
insertActiveBuffer(request);
blockingIter.remove();
}
}
}
}
/** Simulate the underlying state that is actually used to execute the request. */
static class TestUnderlyingState {
private HashMap<String, Integer> hashMap;
public TestUnderlyingState() {
this.hashMap = new HashMap<>();
}
public Integer get(String key) {
return hashMap.get(key);
}
public void update(String key, Integer val) {
hashMap.put(key, val);
}
}
static class TestValueState implements ValueState<Integer> {
private AsyncExecutionController<String, String> asyncExecutionController;
private TestUnderlyingState underlyingState;
public TestValueState(
AsyncExecutionController<String, String> aec, TestUnderlyingState underlyingState) {
this.asyncExecutionController = aec;
this.underlyingState = underlyingState;
}
@Override
public StateFuture<Void> asyncClear() {
StateRequest<String, Void, Void> request =
new StateRequest(this, RequestType.DELETE, null);
return asyncExecutionController.handleRequest(request);
}
@Override
public StateFuture<Integer> asyncValue() {
StateRequest<String, Void, Integer> request =
new StateRequest<>(this, RequestType.GET, null);
return asyncExecutionController.handleRequest(request);
}
@Override
public StateFuture<Void> asyncUpdate(Integer value) {
StateRequest<String, Integer, Void> request =
new StateRequest<>(this, RequestType.PUT, value);
return asyncExecutionController.handleRequest(request);
}
}
/**
* A brief implementation of {@link StateExecutor} that illustrates the interaction between the
* AEC and the StateExecutor.
*/
static class TestStateExecutor implements StateExecutor {
public TestStateExecutor() {}
@Override
public CompletableFuture<Boolean> executeBatchRequests(
Iterable<StateRequest<?, ?, ?>> processingRequests) {
CompletableFuture<Boolean> future = new CompletableFuture<>();
for (StateRequest request : processingRequests) {
if (request.getRequestType() == RequestType.GET) {
Preconditions.checkState(request.getState() != null);
TestValueState state = (TestValueState) request.getState();
Integer val =
state.underlyingState.get((String) request.getRecordContext().getKey());
request.getFuture().complete(val);
} else if (request.getRequestType() == RequestType.PUT) {
Preconditions.checkState(request.getState() != null);
TestValueState state = (TestValueState) request.getState();
state.underlyingState.update(
(String) request.getRecordContext().getKey(),
(Integer) request.getPayload());
request.getFuture().complete(null);
} else {
throw new UnsupportedOperationException("Unsupported request type");
}
}
future.complete(true);
return future;
}
}
} | class AsyncExecutionControllerTest {
@Test
/**
* An AsyncExecutionController for testing purposes, which integrates with a basic buffer
* mechanism.
*/
static class TestAsyncExecutionController<R, K> extends AsyncExecutionController<R, K> {
LinkedList<StateRequest<K, ?, ?>> activeBuffer;
LinkedList<StateRequest<K, ?, ?>> blockingBuffer;
public TestAsyncExecutionController(
MailboxExecutor mailboxExecutor, StateExecutor stateExecutor) {
super(mailboxExecutor, stateExecutor);
activeBuffer = new LinkedList<>();
blockingBuffer = new LinkedList<>();
}
@Override
<IN, OUT> void insertActiveBuffer(StateRequest<K, IN, OUT> request) {
activeBuffer.push(request);
}
<IN, OUT> void insertBlockingBuffer(StateRequest<K, IN, OUT> request) {
blockingBuffer.push(request);
}
void triggerIfNeeded(boolean force) {
if (!force) {
return;
}
LinkedList<StateRequest<?, ?, ?>> toRun = new LinkedList<>(activeBuffer);
activeBuffer.clear();
stateExecutor.executeBatchRequests(toRun);
}
@SuppressWarnings("unchecked")
void migrateBlockingToActive() {
Iterator<StateRequest<K, ?, ?>> blockingIter = blockingBuffer.iterator();
while (blockingIter.hasNext()) {
StateRequest<K, ?, ?> request = blockingIter.next();
if (tryOccupyKey((RecordContext<R, K>) request.getRecordContext())) {
insertActiveBuffer(request);
blockingIter.remove();
}
}
}
}
/** Simulate the underlying state that is actually used to execute the request. */
static class TestUnderlyingState {
private final HashMap<String, Integer> hashMap;
public TestUnderlyingState() {
this.hashMap = new HashMap<>();
}
public Integer get(String key) {
return hashMap.get(key);
}
public void update(String key, Integer val) {
hashMap.put(key, val);
}
}
static class TestValueState implements ValueState<Integer> {
private final AsyncExecutionController<String, String> asyncExecutionController;
private final TestUnderlyingState underlyingState;
public TestValueState(
AsyncExecutionController<String, String> aec, TestUnderlyingState underlyingState) {
this.asyncExecutionController = aec;
this.underlyingState = underlyingState;
}
@Override
public StateFuture<Void> asyncClear() {
return asyncExecutionController.handleRequest(this, StateRequestType.CLEAR, null);
}
@Override
public StateFuture<Integer> asyncValue() {
return asyncExecutionController.handleRequest(this, StateRequestType.VALUE_GET, null);
}
@Override
public StateFuture<Void> asyncUpdate(Integer value) {
return asyncExecutionController.handleRequest(
this, StateRequestType.VALUE_UPDATE, value);
}
}
/**
* A brief implementation of {@link StateExecutor} that illustrates the interaction between the
* AEC and the StateExecutor.
*/
static class TestStateExecutor implements StateExecutor {
public TestStateExecutor() {}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public CompletableFuture<Boolean> executeBatchRequests(
Iterable<StateRequest<?, ?, ?>> processingRequests) {
CompletableFuture<Boolean> future = new CompletableFuture<>();
for (StateRequest request : processingRequests) {
if (request.getRequestType() == StateRequestType.VALUE_GET) {
Preconditions.checkState(request.getState() != null);
TestValueState state = (TestValueState) request.getState();
Integer val =
state.underlyingState.get((String) request.getRecordContext().getKey());
request.getFuture().complete(val);
} else if (request.getRequestType() == StateRequestType.VALUE_UPDATE) {
Preconditions.checkState(request.getState() != null);
TestValueState state = (TestValueState) request.getState();
state.underlyingState.update(
(String) request.getRecordContext().getKey(),
(Integer) request.getPayload());
request.getFuture().complete(null);
} else {
throw new UnsupportedOperationException("Unsupported request type");
}
}
future.complete(true);
return future;
}
}
} |
You will need to update this error message. | public void testMultipleSelectStatementsThrowsException() {
String sql = "SELECT 1; SELECT 2;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
thrown.expectMessage(
"Statement list must end in a SELECT statement, and cannot contain more than one SELECT statement.");
zetaSQLQueryPlanner.convertToBeamRel(sql);
} | "Statement list must end in a SELECT statement, and cannot contain more than one SELECT statement."); | public void testMultipleSelectStatementsThrowsException() {
String sql = "SELECT 1; SELECT 2;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
thrown.expectMessage("No additional statements are allowed after a SELECT statement.");
zetaSQLQueryPlanner.convertToBeamRel(sql);
} | class ZetaSQLDialectSpecTest extends ZetaSQLTestBase {
@Rule public transient TestPipeline pipeline = TestPipeline.create();
@Rule public ExpectedException thrown = ExpectedException.none();
@Before
public void setUp() {
initializeBeamTableProvider();
initializeCalciteEnvironment();
}
@Test
public void testSimpleSelect() {
String sql =
"SELECT CAST (1243 as INT64), "
+ "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
+ "CAST ('string' as STRING);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addDateTimeField("field2")
.addStringField("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
1243L,
new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
"string")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testWithQueryPlannerClass() {
String sql =
"SELECT CAST (1243 as INT64), "
+ "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
+ "CAST ('string' as STRING);";
PCollection<Row> stream =
pipeline.apply(SqlTransform.query(sql).withQueryPlannerClass(ZetaSQLQueryPlanner.class));
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addDateTimeField("field2")
.addStringField("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
1243L,
new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
"string")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testPlannerNamePipelineOption() {
pipeline
.getOptions()
.as(BeamSqlPipelineOptions.class)
.setPlannerName("org.apache.beam.sdk.extensions.sql.zetasql.ZetaSQLQueryPlanner");
String sql =
"SELECT CAST (1243 as INT64), "
+ "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
+ "CAST ('string' as STRING);";
PCollection<Row> stream = pipeline.apply(SqlTransform.query(sql));
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addDateTimeField("field2")
.addStringField("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
1243L,
new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
"string")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testByteLiterals() {
String sql = "SELECT b'abc'";
byte[] byteString = new byte[] {'a', 'b', 'c'};
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("ColA", FieldType.BYTES).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(byteString).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testByteString() {
String sql = "SELECT @p0 IS NULL AS ColA";
ByteString byteString = ByteString.copyFrom(new byte[] {0x62});
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder().put("p0", Value.createBytesValue(byteString)).build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("ColA", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testFloat() {
String sql = "SELECT 3.0";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("ColA", FieldType.DOUBLE).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(3.0).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testStringLiterals() {
String sql = "SELECT '\"America/Los_Angeles\"\\n'";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("ColA", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues("\"America/Los_Angeles\"\n").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testParameterString() {
String sql = "SELECT ?";
ImmutableList<Value> params = ImmutableList.of(Value.createStringValue("abc\n"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("ColA", FieldType.STRING).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc\n").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel")
public void testEQ1() {
String sql = "SELECT @p0 = @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createSimpleNullValue(TypeKind.TYPE_BOOL))
.put("p1", Value.createBoolValue(true))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore(
"Does not support inf/-inf/nan in double/float literals because double/float literals are"
+ " converted to BigDecimal in Calcite codegen.")
public void testEQ2() {
String sql = "SELECT @p0 = @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createDoubleValue(0))
.put("p1", Value.createDoubleValue(Double.POSITIVE_INFINITY))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addBooleanField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel")
public void testEQ3() {
String sql = "SELECT @p0 = @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createSimpleNullValue(TypeKind.TYPE_DOUBLE))
.put("p1", Value.createDoubleValue(3.14))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEQ4() {
String sql = "SELECT @p0 = @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createBytesValue(ByteString.copyFromUtf8("hello")))
.put("p1", Value.createBytesValue(ByteString.copyFromUtf8("hello")))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEQ5() {
String sql = "SELECT b'hello' = b'hello' AS ColA";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEQ6() {
String sql = "SELECT ? = ? AS ColA";
ImmutableList<Value> params =
ImmutableList.of(Value.createInt64Value(4L), Value.createInt64Value(5L));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIsNotNull1() {
String sql = "SELECT @p0 IS NOT NULL AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIsNotNull2() {
String sql = "SELECT @p0 IS NOT NULL AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createNullValue(
TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64))));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIsNotNull3() {
String sql = "SELECT @p0 IS NOT NULL AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createNullValue(
TypeFactory.createStructType(
Arrays.asList(
new StructField(
"a", TypeFactory.createSimpleType(TypeKind.TYPE_STRING))))));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIfBasic() {
String sql = "SELECT IF(@p0, @p1, @p2) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createBoolValue(true),
"p1",
Value.createInt64Value(1),
"p2",
Value.createInt64Value(2));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.INT64).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(1L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIfPositional() {
String sql = "SELECT IF(?, ?, ?) AS ColA";
ImmutableList<Value> params =
ImmutableList.of(
Value.createBoolValue(true), Value.createInt64Value(1), Value.createInt64Value(2));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.INT64).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(1L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCoalesceBasic() {
String sql = "SELECT COALESCE(@p0, @p1, @p2) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p1",
Value.createStringValue("yay"),
"p2",
Value.createStringValue("nay"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("yay").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCoalesceSingleArgument() {
String sql = "SELECT COALESCE(@p0) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createSimpleNullValue(TypeKind.TYPE_INT64));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder().addNullableField("field1", FieldType.array(FieldType.INT64)).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCoalesceNullArray() {
String sql = "SELECT COALESCE(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createNullValue(
TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64))),
"p1",
Value.createNullValue(
TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64))));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder().addNullableField("field1", FieldType.array(FieldType.INT64)).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel")
public void testNullIfCoercion() {
String sql = "SELECT NULLIF(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createInt64Value(3L),
"p1",
Value.createSimpleNullValue(TypeKind.TYPE_DOUBLE));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.DOUBLE).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(3.0).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCoalesceNullStruct() {
String sql = "SELECT COALESCE(NULL, STRUCT(\"a\" AS s, -33 AS i))";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema innerSchema =
Schema.of(Field.of("s", FieldType.STRING), Field.of("i", FieldType.INT64));
final Schema schema =
Schema.builder().addNullableField("field1", FieldType.row(innerSchema)).build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValue(Row.withSchema(innerSchema).addValues("a", -33L).build())
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIfTimestamp() {
String sql = "SELECT IF(@p0, @p1, @p2) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createBoolValue(false),
"p1",
Value.createTimestampValueFromUnixMicros(0),
"p2",
Value.createTimestampValueFromUnixMicros(
DateTime.parse("2019-01-01T00:00:00Z").getMillis() * 1000));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", DATETIME).build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(DateTime.parse("2019-01-01T00:00:00Z")).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("$make_array is not implemented")
public void testMakeArray() {
String sql = "SELECT [s3, s1, s2] FROM (SELECT \"foo\" AS s1, \"bar\" AS s2, \"baz\" AS s3);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder().addNullableField("field1", FieldType.array(FieldType.STRING)).build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue(ImmutableList.of("baz", "foo", "bar")).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testNullIfPositive() {
String sql = "SELECT NULLIF(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("null"), "p1", Value.createStringValue("null"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testNullIfNegative() {
String sql = "SELECT NULLIF(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("foo"), "p1", Value.createStringValue("null"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("foo").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIfNullPositive() {
String sql = "SELECT IFNULL(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("foo"), "p1", Value.createStringValue("default"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("foo").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIfNullNegative() {
String sql = "SELECT IFNULL(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p1",
Value.createStringValue("yay"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("yay").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEmptyArrayParameter() {
String sql = "SELECT @p0 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createArrayValue(
TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64)),
ImmutableList.of()));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addArrayField("field1", FieldType.INT64).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValue(ImmutableList.of()).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEmptyArrayLiteral() {
String sql = "SELECT ARRAY<STRING>[];";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addArrayField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValue(ImmutableList.of()).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testLike1() {
String sql = "SELECT @p0 LIKE @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("ab%"), "p1", Value.createStringValue("ab\\%"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel")
public void testLikeNullPattern() {
String sql = "SELECT @p0 LIKE @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createStringValue("ab%"),
"p1",
Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testLikeAllowsEscapingNonSpecialCharacter() {
String sql = "SELECT @p0 LIKE @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createStringValue("ab"), "p1", Value.createStringValue("\\ab"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testLikeAllowsEscapingBackslash() {
String sql = "SELECT @p0 LIKE @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("a\\c"), "p1", Value.createStringValue("a\\\\c"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testLikeBytes() {
String sql = "SELECT @p0 LIKE @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createBytesValue(ByteString.copyFromUtf8("abcd")),
"p1",
Value.createBytesValue(ByteString.copyFromUtf8("__%")));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testMod() {
String sql = "SELECT MOD(4, 2)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(0L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSimpleUnionAll() {
String sql =
"SELECT CAST (1243 as INT64), "
+ "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
+ "CAST ('string' as STRING) "
+ " UNION ALL "
+ " SELECT CAST (1243 as INT64), "
+ "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
+ "CAST ('string' as STRING);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addDateTimeField("field2")
.addStringField("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
1243L,
new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
"string")
.build(),
Row.withSchema(schema)
.addValues(
1243L,
new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
"string")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testThreeWayUnionAll() {
String sql = "SELECT a FROM (SELECT 1 a UNION ALL SELECT 2 UNION ALL SELECT 3)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L).build(),
Row.withSchema(schema).addValues(2L).build(),
Row.withSchema(schema).addValues(3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSimpleUnionDISTINCT() {
String sql =
"SELECT CAST (1243 as INT64), "
+ "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
+ "CAST ('string' as STRING) "
+ " UNION DISTINCT "
+ " SELECT CAST (1243 as INT64), "
+ "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
+ "CAST ('string' as STRING);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addDateTimeField("field2")
.addStringField("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
1243L,
new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
"string")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLInnerJoin() {
String sql =
"SELECT t1.Key "
+ "FROM KeyValue AS t1"
+ " INNER JOIN BigTable AS t2"
+ " on "
+ " t1.Key = t2.RowKey AND t1.ts = t2.ts";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("field1").build())
.addValues(15L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLInnerJoinWithUsing() {
String sql = "SELECT t1.Key " + "FROM KeyValue AS t1" + " INNER JOIN BigTable AS t2 USING(ts)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("field1").build())
.addValues(15L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLInnerJoinTwo() {
String sql =
"SELECT t2.RowKey "
+ "FROM KeyValue AS t1"
+ " INNER JOIN BigTable AS t2"
+ " on "
+ " t2.RowKey = t1.Key AND t2.ts = t1.ts";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("field1").build())
.addValues(15L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLLeftOuterJoin() {
String sql =
"SELECT * "
+ "FROM KeyValue AS t1"
+ " LEFT JOIN BigTable AS t2"
+ " on "
+ " t1.Key = t2.RowKey";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schemaOne =
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addDateTimeField("field3")
.addNullableField("field4", FieldType.INT64)
.addNullableField("field5", FieldType.STRING)
.addNullableField("field6", DATETIME)
.build();
final Schema schemaTwo =
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addDateTimeField("field3")
.addInt64Field("field4")
.addStringField("field5")
.addDateTimeField("field6")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schemaOne)
.addValues(
14L,
"KeyValue234",
new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()),
null,
null,
null)
.build(),
Row.withSchema(schemaTwo)
.addValues(
15L,
"KeyValue235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()),
15L,
"BigTable235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLRightOuterJoin() {
String sql =
"SELECT * "
+ "FROM KeyValue AS t1"
+ " RIGHT JOIN BigTable AS t2"
+ " on "
+ " t1.Key = t2.RowKey";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schemaOne =
Schema.builder()
.addNullableField("field1", FieldType.INT64)
.addNullableField("field2", FieldType.STRING)
.addNullableField("field3", DATETIME)
.addInt64Field("field4")
.addStringField("field5")
.addDateTimeField("field6")
.build();
final Schema schemaTwo =
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addDateTimeField("field3")
.addInt64Field("field4")
.addStringField("field5")
.addDateTimeField("field6")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schemaOne)
.addValues(
null,
null,
null,
16L,
"BigTable236",
new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC()))
.build(),
Row.withSchema(schemaTwo)
.addValues(
15L,
"KeyValue235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()),
15L,
"BigTable235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLFullOuterJoin() {
String sql =
"SELECT * "
+ "FROM KeyValue AS t1"
+ " FULL JOIN BigTable AS t2"
+ " on "
+ " t1.Key = t2.RowKey";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schemaOne =
Schema.builder()
.addNullableField("field1", FieldType.INT64)
.addNullableField("field2", FieldType.STRING)
.addNullableField("field3", DATETIME)
.addInt64Field("field4")
.addStringField("field5")
.addDateTimeField("field6")
.build();
final Schema schemaTwo =
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addDateTimeField("field3")
.addInt64Field("field4")
.addStringField("field5")
.addDateTimeField("field6")
.build();
final Schema schemaThree =
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addDateTimeField("field3")
.addNullableField("field4", FieldType.INT64)
.addNullableField("field5", FieldType.STRING)
.addNullableField("field6", DATETIME)
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schemaOne)
.addValues(
null,
null,
null,
16L,
"BigTable236",
new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC()))
.build(),
Row.withSchema(schemaTwo)
.addValues(
15L,
"KeyValue235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()),
15L,
"BigTable235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()))
.build(),
Row.withSchema(schemaThree)
.addValues(
14L,
"KeyValue234",
new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()),
null,
null,
null)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("BeamSQL only supports equal join")
public void testZetaSQLFullOuterJoinTwo() {
String sql =
"SELECT * "
+ "FROM KeyValue AS t1"
+ " FULL JOIN BigTable AS t2"
+ " on "
+ " t1.Key + t2.RowKey = 30";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLFullOuterJoinFalse() {
String sql = "SELECT * FROM KeyValue AS t1 FULL JOIN BigTable AS t2 ON false";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
thrown.expect(UnsupportedOperationException.class);
BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
}
@Test
public void testZetaSQLThreeWayInnerJoin() {
String sql =
"SELECT t3.Value, t2.Value, t1.Value, t1.Key, t3.ColId FROM KeyValue as t1 "
+ "JOIN BigTable as t2 "
+ "ON (t1.Key = t2.RowKey) "
+ "JOIN Spanner as t3 "
+ "ON (t3.ColId = t1.Key)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addStringField("t3.Value")
.addStringField("t2.Value")
.addStringField("t1.Value")
.addInt64Field("t1.Key")
.addInt64Field("t3.ColId")
.build())
.addValues("Spanner235", "BigTable235", "KeyValue235", 15L, 15L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLTableJoinOnItselfWithFiltering() {
String sql =
"SELECT * FROM Spanner as t1 "
+ "JOIN Spanner as t2 "
+ "ON (t1.ColId = t2.ColId) WHERE t1.ColId = 17";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addInt64Field("field3")
.addStringField("field4")
.build())
.addValues(17L, "Spanner237", 17L, "Spanner237")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectFromSelect() {
String sql = "SELECT * FROM (SELECT \"apple\" AS fruit, \"carrot\" AS vegetable);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder().addStringField("field1").addStringField("field2").build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues("apple", "carrot").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
Schema outputSchema = stream.getSchema();
Assert.assertEquals(2, outputSchema.getFieldCount());
Assert.assertEquals("fruit", outputSchema.getField(0).getName());
Assert.assertEquals("vegetable", outputSchema.getField(1).getName());
}
@Test
public void testZetaSQLSelectFromTable() {
String sql = "SELECT Key, Value FROM KeyValue;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, "KeyValue234").build(),
Row.withSchema(schema).addValues(15L, "KeyValue235").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectFromTableLimit() {
String sql = "SELECT Key, Value FROM KeyValue LIMIT 2;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, "KeyValue234").build(),
Row.withSchema(schema).addValues(15L, "KeyValue235").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectFromTableLimit0() {
String sql = "SELECT Key, Value FROM KeyValue LIMIT 0;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream).containsInAnyOrder();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectNullLimitParam() {
String sql = "SELECT Key, Value FROM KeyValue LIMIT @lmt;";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"lmt", Value.createNullValue(TypeFactory.createSimpleType(TypeKind.TYPE_INT64)));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(RuntimeException.class);
thrown.expectMessage("Limit requires non-null count and offset");
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
}
@Test
public void testZetaSQLSelectNullOffsetParam() {
String sql = "SELECT Key, Value FROM KeyValue LIMIT 1 OFFSET @lmt;";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"lmt", Value.createNullValue(TypeFactory.createSimpleType(TypeKind.TYPE_INT64)));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(RuntimeException.class);
thrown.expectMessage("Limit requires non-null count and offset");
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
}
@Test
public void testZetaSQLSelectFromTableOrderLimit() {
String sql =
"SELECT x, y FROM (SELECT 1 as x, 0 as y UNION ALL SELECT 0, 0 "
+ "UNION ALL SELECT 1, 0 UNION ALL SELECT 1, 1) ORDER BY x LIMIT 1";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(0L, 0L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectFromTableLimitOffset() {
String sql =
"SELECT COUNT(a) FROM (\n"
+ "SELECT a FROM (SELECT 1 a UNION ALL SELECT 2 UNION ALL SELECT 3) LIMIT 3 OFFSET 1);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectFromTableOrderByLimit() {
String sql = "SELECT Key, Value FROM KeyValue ORDER BY Key DESC LIMIT 2;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, "KeyValue234").build(),
Row.withSchema(schema).addValues(15L, "KeyValue235").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectFromTableOrderBy() {
String sql = "SELECT Key, Value FROM KeyValue ORDER BY Key DESC;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(RuntimeException.class);
thrown.expectMessage("ORDER BY without a LIMIT is not supported.");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testZetaSQLSelectFromTableWithStructType2() {
String sql =
"SELECT table_with_struct.struct_col.struct_col_str FROM table_with_struct WHERE id = 1;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue("row_one").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLStructFieldAccessInFilter() {
String sql =
"SELECT table_with_struct.id FROM table_with_struct WHERE"
+ " table_with_struct.struct_col.struct_col_str = 'row_one';";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(1L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLStructFieldAccessInCast() {
String sql =
"SELECT CAST(table_with_struct.id AS STRING) FROM table_with_struct WHERE"
+ " table_with_struct.struct_col.struct_col_str = 'row_one';";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue("1").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("[BEAM-9191] CAST operator does not work fully due to bugs in unparsing")
public void testZetaSQLStructFieldAccessInCast2() {
String sql =
"SELECT CAST(A.struct_col.struct_col_str AS TIMESTAMP) FROM table_with_struct_ts_string AS"
+ " A";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addDateTimeField("field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValue(parseTimestampWithUTCTimeZone("2019-01-15 13:21:03"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testAggregateWithAndWithoutColumnRefs() {
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
String sql =
"SELECT \n"
+ " id, \n"
+ " SUM(has_f1) as f1_count, \n"
+ " SUM(has_f2) as f2_count, \n"
+ " SUM(has_f3) as f3_count, \n"
+ " SUM(has_f4) as f4_count, \n"
+ " SUM(has_f5) as f5_count, \n"
+ " COUNT(*) as count, \n"
+ " SUM(has_f6) as f6_count \n"
+ "FROM (select 0 as id, 1 as has_f1, 2 as has_f2, 3 as has_f3, 4 as has_f4, 5 as has_f5, 6 as has_f6)\n"
+ "GROUP BY id";
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("id")
.addInt64Field("f1_count")
.addInt64Field("f2_count")
.addInt64Field("f3_count")
.addInt64Field("f4_count")
.addInt64Field("f5_count")
.addInt64Field("count")
.addInt64Field("f6_count")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(0L, 1L, 2L, 3L, 4L, 5L, 1L, 6L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLStructFieldAccessInGroupBy() {
String sql = "SELECT rowCol.row_id, COUNT(*) FROM table_with_struct_two GROUP BY rowCol.row_id";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 1L).build(),
Row.withSchema(schema).addValues(2L, 1L).build(),
Row.withSchema(schema).addValues(3L, 2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLAnyValueInGroupBy() {
String sql =
"SELECT rowCol.row_id as key, ANY_VALUE(rowCol.data) as any_value FROM table_with_struct_two GROUP BY rowCol.row_id";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Map<Long, List<String>> allowedTuples = new HashMap<>();
allowedTuples.put(1L, Arrays.asList("data1"));
allowedTuples.put(2L, Arrays.asList("data2"));
allowedTuples.put(3L, Arrays.asList("data2", "data3"));
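// ANY_VALUE is non-deterministic: key 3 groups two rows, so either "data2" or "data3"
// is an acceptable result; keys 1 and 2 each have a single row.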
PAssert.that(stream)
.satisfies(
input -> {
Iterator<Row> iter = input.iterator();
while (iter.hasNext()) {
Row row = iter.next();
List<String> values = allowedTuples.remove(row.getInt64("key"));
assertTrue(values != null);
assertTrue(values.contains(row.getString("any_value")));
}
assertTrue(allowedTuples.isEmpty());
return null;
});
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLStructFieldAccessInGroupBy2() {
String sql =
"SELECT rowCol.data, MAX(rowCol.row_id), MIN(rowCol.row_id) FROM table_with_struct_two"
+ " GROUP BY rowCol.data";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addStringField("field1")
.addInt64Field("field2")
.addInt64Field("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("data1", 1L, 1L).build(),
Row.withSchema(schema).addValues("data2", 3L, 2L).build(),
Row.withSchema(schema).addValues("data3", 3L, 3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLStructFieldAccessInnerJoin() {
String sql =
"SELECT A.rowCol.data FROM table_with_struct_two AS A INNER JOIN "
+ "table_with_struct AS B "
+ "ON A.rowCol.row_id = B.id";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue("data1").build(),
Row.withSchema(schema).addValue("data2").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectFromTableWithArrayType() {
String sql = "SELECT array_col FROM table_with_array;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addArrayField("field", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue(Arrays.asList("1", "2", "3")).build(),
Row.withSchema(schema).addValue(ImmutableList.of()).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectStarFromTable() {
String sql = "SELECT * FROM BigTable;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addDateTimeField("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
15L,
"BigTable235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()))
.build(),
Row.withSchema(schema)
.addValues(
16L,
"BigTable236",
new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC()))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicFiltering() {
String sql = "SELECT Key, Value FROM KeyValue WHERE Key = 14;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder().addInt64Field("field1").addStringField("field2").build())
.addValues(14L, "KeyValue234")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicFilteringTwo() {
String sql = "SELECT Key, Value FROM KeyValue WHERE Key = 14 AND Value = 'non-existing';";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream).containsInAnyOrder();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicFilteringThree() {
String sql = "SELECT Key, Value FROM KeyValue WHERE Key = 14 OR Key = 15;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, "KeyValue234").build(),
Row.withSchema(schema).addValues(15L, "KeyValue235").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLCountOnAColumn() {
String sql = "SELECT COUNT(Key) FROM KeyValue";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLAggDistinct() {
String sql = "SELECT Key, COUNT(DISTINCT Value) FROM KeyValue GROUP BY Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(RuntimeException.class);
thrown.expectMessage("Does not support COUNT DISTINCT");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testZetaSQLBasicAgg() {
String sql = "SELECT Key, COUNT(*) FROM KeyValue GROUP BY Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, 1L).build(),
Row.withSchema(schema).addValues(15L, 1L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLColumnAlias1() {
String sql = "SELECT Key, COUNT(*) AS count_col FROM KeyValue GROUP BY Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
Schema outputSchema = stream.getSchema();
Assert.assertEquals(2, outputSchema.getFieldCount());
Assert.assertEquals("Key", outputSchema.getField(0).getName());
Assert.assertEquals("count_col", outputSchema.getField(1).getName());
}
@Test
public void testZetaSQLColumnAlias2() {
String sql =
"SELECT Key AS k1, (count_col + 1) AS k2 FROM (SELECT Key, COUNT(*) AS count_col FROM"
+ " KeyValue GROUP BY Key)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
Schema outputSchema = stream.getSchema();
Assert.assertEquals(2, outputSchema.getFieldCount());
Assert.assertEquals("k1", outputSchema.getField(0).getName());
Assert.assertEquals("k2", outputSchema.getField(1).getName());
}
@Test
public void testZetaSQLColumnAlias3() {
String sql = "SELECT Key AS v1, Value AS v2, ts AS v3 FROM KeyValue";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
Schema outputSchema = stream.getSchema();
Assert.assertEquals(3, outputSchema.getFieldCount());
Assert.assertEquals("v1", outputSchema.getField(0).getName());
Assert.assertEquals("v2", outputSchema.getField(1).getName());
Assert.assertEquals("v3", outputSchema.getField(2).getName());
}
@Test
public void testZetaSQLColumnAlias4() {
String sql = "SELECT CAST(123 AS INT64) AS cast_col";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
Schema outputSchema = stream.getSchema();
Assert.assertEquals(1, outputSchema.getFieldCount());
Assert.assertEquals("cast_col", outputSchema.getField(0).getName());
}
@Test
public void testZetaSQLAmbiguousAlias() {
String sql = "SELECT row_id as ID, int64_col as ID FROM table_all_types GROUP BY ID;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expectMessage(
"Name ID in GROUP BY clause is ambiguous; it may refer to multiple columns in the"
+ " SELECT-list [at 1:68]");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testZetaSQLAggWithOrdinalReference() {
String sql = "SELECT Key, COUNT(*) FROM aggregate_test_table GROUP BY 1";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 2L).build(),
Row.withSchema(schema).addValues(2L, 3L).build(),
Row.withSchema(schema).addValues(3L, 2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLAggWithAliasReference() {
String sql = "SELECT Key AS K, COUNT(*) FROM aggregate_test_table GROUP BY K";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 2L).build(),
Row.withSchema(schema).addValues(2L, 3L).build(),
Row.withSchema(schema).addValues(3L, 2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicAgg2() {
String sql = "SELECT Key, COUNT(*) FROM aggregate_test_table GROUP BY Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 2L).build(),
Row.withSchema(schema).addValues(2L, 3L).build(),
Row.withSchema(schema).addValues(3L, 2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicAgg3() {
String sql = "SELECT Key, Key2, COUNT(*) FROM aggregate_test_table GROUP BY Key2, Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addInt64Field("field3")
.addInt64Field("field2")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 10L, 1L).build(),
Row.withSchema(schema).addValues(1L, 11L, 1L).build(),
Row.withSchema(schema).addValues(2L, 11L, 2L).build(),
Row.withSchema(schema).addValues(2L, 12L, 1L).build(),
Row.withSchema(schema).addValues(3L, 13L, 2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicAgg4() {
String sql =
"SELECT Key, Key2, MAX(f_int_1), MIN(f_int_1), SUM(f_int_1), SUM(f_double_1) "
+ "FROM aggregate_test_table GROUP BY Key2, Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addInt64Field("field3")
.addInt64Field("field2")
.addInt64Field("field4")
.addInt64Field("field5")
.addDoubleField("field6")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 10L, 1L, 1L, 1L, 1.0).build(),
Row.withSchema(schema).addValues(1L, 11L, 2L, 2L, 2L, 2.0).build(),
Row.withSchema(schema).addValues(2L, 11L, 4L, 3L, 7L, 7.0).build(),
Row.withSchema(schema).addValues(2L, 12L, 5L, 5L, 5L, 5.0).build(),
Row.withSchema(schema).addValues(3L, 13L, 7L, 6L, 13L, 13.0).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicAgg5() {
String sql =
"SELECT Key, Key2, AVG(CAST(f_int_1 AS FLOAT64)), AVG(f_double_1) "
+ "FROM aggregate_test_table GROUP BY Key2, Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addInt64Field("field2")
.addDoubleField("field3")
.addDoubleField("field4")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 10L, 1.0, 1.0).build(),
Row.withSchema(schema).addValues(1L, 11L, 2.0, 2.0).build(),
Row.withSchema(schema).addValues(2L, 11L, 3.5, 3.5).build(),
Row.withSchema(schema).addValues(2L, 12L, 5.0, 5.0).build(),
Row.withSchema(schema).addValues(3L, 13L, 6.5, 6.5).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore(
"Calcite infers return type of AVG(int64) as BIGINT while ZetaSQL requires it as either"
+ " NUMERIC or DOUBLE/FLOAT64")
public void testZetaSQLTestAVG() {
String sql = "SELECT Key, AVG(f_int_1)" + "FROM aggregate_test_table GROUP BY Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addInt64Field("field2")
.addInt64Field("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 10L, 1L).build(),
Row.withSchema(schema).addValues(1L, 11L, 6L).build(),
Row.withSchema(schema).addValues(2L, 11L, 6L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLGroupByExprInSelect() {
String sql = "SELECT int64_col + 1 FROM table_all_types GROUP BY int64_col + 1;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue(0L).build(),
Row.withSchema(schema).addValue(-1L).build(),
Row.withSchema(schema).addValue(-2L).build(),
Row.withSchema(schema).addValue(-3L).build(),
Row.withSchema(schema).addValue(-4L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLGroupByAndFiltering() {
String sql = "SELECT int64_col FROM table_all_types WHERE int64_col = 1 GROUP BY int64_col;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream).containsInAnyOrder();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLGroupByAndFilteringOnNonGroupByColumn() {
String sql = "SELECT int64_col FROM table_all_types WHERE double_col = 0.5 GROUP BY int64_col;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue(-5L).build(),
Row.withSchema(schema).addValue(-4L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicHaving() {
String sql = "SELECT Key, COUNT(*) FROM aggregate_test_table GROUP BY Key HAVING COUNT(*) > 2";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(2L, 3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLHavingNull() {
String sql = "SELECT SUM(int64_val) FROM all_null_table GROUP BY primary_key HAVING false";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream).empty();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicFixedWindowing() {
String sql =
"SELECT "
+ "COUNT(*) as field_count, "
+ "TUMBLE_START(\"INTERVAL 1 SECOND\") as window_start, "
+ "TUMBLE_END(\"INTERVAL 1 SECOND\") as window_end "
+ "FROM KeyValue "
+ "GROUP BY TUMBLE(ts, \"INTERVAL 1 SECOND\");";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("count_start")
.addDateTimeField("field1")
.addDateTimeField("field2")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
1L,
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()),
new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC()))
.build(),
Row.withSchema(schema)
.addValues(
1L,
new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()),
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLNestedQueryOne() {
String sql =
"SELECT a.Value, a.Key FROM (SELECT Key, Value FROM KeyValue WHERE Key = 14 OR Key = 15)"
+ " as a;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field2").addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("KeyValue234", 14L).build(),
Row.withSchema(schema).addValues("KeyValue235", 15L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLNestedQueryTwo() {
String sql =
"SELECT a.Key, a.Key2, COUNT(*) FROM "
+ " (SELECT * FROM aggregate_test_table WHERE Key != 10) as a "
+ " GROUP BY a.Key2, a.Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addInt64Field("field3")
.addInt64Field("field2")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 10L, 1L).build(),
Row.withSchema(schema).addValues(1L, 11L, 1L).build(),
Row.withSchema(schema).addValues(2L, 11L, 2L).build(),
Row.withSchema(schema).addValues(2L, 12L, 1L).build(),
Row.withSchema(schema).addValues(3L, 13L, 2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLNestedQueryThree() {
String sql =
"SELECT * FROM (SELECT * FROM KeyValue) AS t1 INNER JOIN (SELECT * FROM BigTable) AS t2 on"
+ " t1.Key = t2.RowKey";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addInt64Field("Key")
.addStringField("Value")
.addDateTimeField("ts")
.addInt64Field("RowKey")
.addStringField("Value2")
.addDateTimeField("ts2")
.build())
.addValues(
15L,
"KeyValue235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()),
15L,
"BigTable235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLNestedQueryFive() {
String sql =
"SELECT a.Value, a.Key FROM (SELECT Value, Key FROM KeyValue WHERE Key = 14 OR Key = 15)"
+ " as a;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field2").addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("KeyValue234", 14L).build(),
Row.withSchema(schema).addValues("KeyValue235", 15L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateLiteral() {
String sql = "SELECT DATE '2020-3-30'";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_date", SqlTypes.DATE).build())
.addValues(LocalDate.of(2020, 3, 30))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateColumn() {
String sql = "SELECT FORMAT_DATE('%b-%d-%Y', date_field) FROM table_with_date";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("f_date_str").build())
.addValues("Dec-25-2008")
.build(),
Row.withSchema(Schema.builder().addStringField("f_date_str").build())
.addValues("Apr-07-2020")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testExtractDate() {
String sql =
"WITH Dates AS (\n"
+ " SELECT DATE '2015-12-31' AS date UNION ALL\n"
+ " SELECT DATE '2016-01-01'\n"
+ ")\n"
+ "SELECT\n"
+ " EXTRACT(ISOYEAR FROM date) AS isoyear,\n"
+ " EXTRACT(YEAR FROM date) AS year,\n"
+ " EXTRACT(ISOWEEK FROM date) AS isoweek,\n"
+ " EXTRACT(MONTH FROM date) AS month\n"
+ "FROM Dates\n";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addField("isoyear", FieldType.INT64)
.addField("year", FieldType.INT64)
.addField("isoweek", FieldType.INT64)
.addField("month", FieldType.INT64)
.build();
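// ISO week 53 of 2015 runs Mon 2015-12-28 through Sun 2016-01-03, so both 2015-12-31 and
// 2016-01-01 report ISO year 2015 and ISO week 53 despite having different calendar years.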
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(2015L, 2015L, 53L /* , 52L */, 12L).build(),
Row.withSchema(schema).addValues(2015L, 2016L, 53L /* , 0L */, 1L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateFromYearMonthDay() {
String sql = "SELECT DATE(2008, 12, 25)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_date", SqlTypes.DATE).build())
.addValues(LocalDate.of(2008, 12, 25))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateFromTimestamp() {
String sql = "SELECT DATE(TIMESTAMP '2016-12-25 05:30:00+07', 'America/Los_Angeles')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_date", SqlTypes.DATE).build())
.addValues(LocalDate.of(2016, 12, 24))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateAdd() {
String sql =
"SELECT "
+ "DATE_ADD(DATE '2008-12-25', INTERVAL 5 DAY), "
+ "DATE_ADD(DATE '2008-12-25', INTERVAL 1 MONTH), "
+ "DATE_ADD(DATE '2008-12-25', INTERVAL 1 YEAR), ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addLogicalTypeField("f_date1", SqlTypes.DATE)
.addLogicalTypeField("f_date2", SqlTypes.DATE)
.addLogicalTypeField("f_date3", SqlTypes.DATE)
.build())
.addValues(
LocalDate.of(2008, 12, 30),
LocalDate.of(2009, 1, 25),
LocalDate.of(2009, 12, 25))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateSub() {
String sql =
"SELECT "
+ "DATE_SUB(DATE '2008-12-25', INTERVAL 5 DAY), "
+ "DATE_SUB(DATE '2008-12-25', INTERVAL 1 MONTH), "
+ "DATE_SUB(DATE '2008-12-25', INTERVAL 1 YEAR), ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addLogicalTypeField("f_date1", SqlTypes.DATE)
.addLogicalTypeField("f_date2", SqlTypes.DATE)
.addLogicalTypeField("f_date3", SqlTypes.DATE)
.build())
.addValues(
LocalDate.of(2008, 12, 20),
LocalDate.of(2008, 11, 25),
LocalDate.of(2007, 12, 25))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateDiff() {
String sql = "SELECT DATE_DIFF(DATE '2010-07-07', DATE '2008-12-25', DAY)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("f_date_diff").build())
.addValues(559L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateDiffNegativeResult() {
String sql = "SELECT DATE_DIFF(DATE '2017-12-17', DATE '2017-12-18', ISOWEEK)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("f_date_diff").build())
.addValues(-1L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateTrunc() {
String sql = "SELECT DATE_TRUNC(DATE '2015-06-15', ISOYEAR)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder().addLogicalTypeField("f_date_trunc", SqlTypes.DATE).build())
.addValues(LocalDate.of(2014, 12, 29))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testFormatDate() {
String sql = "SELECT FORMAT_DATE('%b-%d-%Y', DATE '2008-12-25')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("f_date_str").build())
.addValues("Dec-25-2008")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testParseDate() {
String sql = "SELECT PARSE_DATE('%m %d %y', '10 14 18')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_date", SqlTypes.DATE).build())
.addValues(LocalDate.of(2018, 10, 14))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateToUnixInt64() {
String sql = "SELECT UNIX_DATE(DATE '2008-12-25')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("f_unix_date").build())
.addValues(14238L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateFromUnixInt64() {
String sql = "SELECT DATE_FROM_UNIX_DATE(14238)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_date", SqlTypes.DATE).build())
.addValues(LocalDate.of(2008, 12, 25))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeLiteral() {
String sql = "SELECT TIME '15:30:00', TIME '15:30:00.135246' ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addLogicalTypeField("f_time1", SqlTypes.TIME)
.addLogicalTypeField("f_time2", SqlTypes.TIME)
.build())
.addValues(LocalTime.of(15, 30, 0))
.addValues(LocalTime.of(15, 30, 0, 135246000))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeColumn() {
String sql = "SELECT FORMAT_TIME('%T', time_field) FROM table_with_time";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("f_time_str").build())
.addValues("15:30:00")
.build(),
Row.withSchema(Schema.builder().addStringField("f_time_str").build())
.addValues("23:35:59")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testExtractTime() {
String sql =
"SELECT "
+ "EXTRACT(HOUR FROM TIME '15:30:35.123456') as hour, "
+ "EXTRACT(MINUTE FROM TIME '15:30:35.123456') as minute, "
+ "EXTRACT(SECOND FROM TIME '15:30:35.123456') as second, "
+ "EXTRACT(MILLISECOND FROM TIME '15:30:35.123456') as millisecond, "
+ "EXTRACT(MICROSECOND FROM TIME '15:30:35.123456') as microsecond ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addField("hour", FieldType.INT64)
.addField("minute", FieldType.INT64)
.addField("second", FieldType.INT64)
.addField("millisecond", FieldType.INT64)
.addField("microsecond", FieldType.INT64)
.build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues(15L, 30L, 35L, 123L, 123456L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeFromHourMinuteSecond() {
String sql = "SELECT TIME(15, 30, 0)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_time", SqlTypes.TIME).build())
.addValues(LocalTime.of(15, 30, 0))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeFromTimestamp() {
String sql = "SELECT TIME(TIMESTAMP '2008-12-25 15:30:00+08', 'America/Los_Angeles')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_time", SqlTypes.TIME).build())
.addValues(LocalTime.of(23, 30, 0))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeAdd() {
String sql =
"SELECT "
+ "TIME_ADD(TIME '15:30:00', INTERVAL 10 MICROSECOND), "
+ "TIME_ADD(TIME '15:30:00', INTERVAL 10 MILLISECOND), "
+ "TIME_ADD(TIME '15:30:00', INTERVAL 10 SECOND), "
+ "TIME_ADD(TIME '15:30:00', INTERVAL 10 MINUTE), "
+ "TIME_ADD(TIME '15:30:00', INTERVAL 10 HOUR) ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addLogicalTypeField("f_time1", SqlTypes.TIME)
.addLogicalTypeField("f_time2", SqlTypes.TIME)
.addLogicalTypeField("f_time3", SqlTypes.TIME)
.addLogicalTypeField("f_time4", SqlTypes.TIME)
.addLogicalTypeField("f_time5", SqlTypes.TIME)
.build())
.addValues(
LocalTime.of(15, 30, 0, 10000),
LocalTime.of(15, 30, 0, 10000000),
LocalTime.of(15, 30, 10, 0),
LocalTime.of(15, 40, 0, 0),
LocalTime.of(1, 30, 0, 0))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeSub() {
String sql =
"SELECT "
+ "TIME_SUB(TIME '15:30:00', INTERVAL 10 MICROSECOND), "
+ "TIME_SUB(TIME '15:30:00', INTERVAL 10 MILLISECOND), "
+ "TIME_SUB(TIME '15:30:00', INTERVAL 10 SECOND), "
+ "TIME_SUB(TIME '15:30:00', INTERVAL 10 MINUTE), "
+ "TIME_SUB(TIME '15:30:00', INTERVAL 10 HOUR) ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addLogicalTypeField("f_time1", SqlTypes.TIME)
.addLogicalTypeField("f_time2", SqlTypes.TIME)
.addLogicalTypeField("f_time3", SqlTypes.TIME)
.addLogicalTypeField("f_time4", SqlTypes.TIME)
.addLogicalTypeField("f_time5", SqlTypes.TIME)
.build())
.addValues(
LocalTime.of(15, 29, 59, 999990000),
LocalTime.of(15, 29, 59, 990000000),
LocalTime.of(15, 29, 50, 0),
LocalTime.of(15, 20, 0, 0),
LocalTime.of(5, 30, 0, 0))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeDiff() {
String sql = "SELECT TIME_DIFF(TIME '15:30:00', TIME '14:35:00', MINUTE)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("f_time_diff").build())
.addValues(55L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeDiffNegativeResult() {
String sql = "SELECT TIME_DIFF(TIME '14:35:00', TIME '15:30:00', MINUTE)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("f_time_diff").build())
.addValues(-55L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeTrunc() {
String sql = "SELECT TIME_TRUNC(TIME '15:30:35', HOUR)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder().addLogicalTypeField("f_time_trunc", SqlTypes.TIME).build())
.addValues(LocalTime.of(15, 0, 0))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testFormatTime() {
String sql = "SELECT FORMAT_TIME('%R', TIME '15:30:00')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("f_time_str").build())
.addValues("15:30")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testParseTime() {
String sql = "SELECT PARSE_TIME('%H', '15')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_time", SqlTypes.TIME).build())
.addValues(LocalTime.of(15, 0, 0))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("https:
public void testCastBetweenTimeAndString() {
String sql =
"SELECT CAST(s1 as TIME) as t2, CAST(t1 as STRING) as s2 FROM "
+ "(SELECT '12:34:56.123456' as s1, TIME '12:34:56.123456' as t1)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addLogicalTypeField("t2", SqlTypes.TIME)
.addStringField("s2")
.build())
.addValues(LocalTime.of(12, 34, 56, 123456000), "12:34:56.123456")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampMicrosecondUnsupported() {
String sql =
"WITH Timestamps AS (\n"
+ " SELECT TIMESTAMP '2000-01-01 00:11:22.345678+00' as timestamp\n"
+ ")\n"
+ "SELECT\n"
+ " timestamp,\n"
+ " EXTRACT(ISOYEAR FROM timestamp) AS isoyear,\n"
+ " EXTRACT(YEAR FROM timestamp) AS year,\n"
+ " EXTRACT(ISOWEEK FROM timestamp) AS week,\n"
+ " EXTRACT(MINUTE FROM timestamp) AS minute\n"
+ "FROM Timestamps\n";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testTimestampLiteralWithoutTimeZone() {
String sql = "SELECT TIMESTAMP '2016-12-25 05:30:00'";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addDateTimeField("field1").build())
.addValues(parseTimestampWithUTCTimeZone("2016-12-25 05:30:00"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampLiteralWithUTCTimeZone() {
String sql = "SELECT TIMESTAMP '2016-12-25 05:30:00+00'";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addDateTimeField("field1").build())
.addValues(parseTimestampWithUTCTimeZone("2016-12-25 05:30:00"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectNullIntersectDistinct() {
String sql = "SELECT NULL INTERSECT DISTINCT SELECT 2";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
System.err.println("SCHEMA " + stream.getSchema());
PAssert.that(stream).empty();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectNullIntersectAll() {
String sql = "SELECT NULL INTERSECT ALL SELECT 2";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
System.err.println("SCHEMA " + stream.getSchema());
PAssert.that(stream).empty();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectNullExceptDistinct() {
String sql = "SELECT NULL EXCEPT DISTINCT SELECT 2";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream).containsInAnyOrder(Row.nullRow(stream.getSchema()));
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectNullExceptAll() {
String sql = "SELECT NULL EXCEPT ALL SELECT 2";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream).containsInAnyOrder(Row.nullRow(stream.getSchema()));
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testAlreadyDefinedUDFThrowsException() {
String sql = "CREATE FUNCTION foo() AS (0); CREATE FUNCTION foo() AS (1); SELECT foo();";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(ParseException.class);
thrown.expectMessage("Failed to define function foo");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testCreateFunctionNoSelectThrowsException() {
String sql = "CREATE FUNCTION plusOne(x INT64) AS (x + 1);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
thrown.expectMessage("Statement list must end in a SELECT statement, not CreateFunctionStmt");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testNullaryUdf() {
String sql = "CREATE FUNCTION zero() AS (0); SELECT zero();";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("x").build()).addValue(0L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testQualifiedNameUdfUnqualifiedCall() {
String sql = "CREATE FUNCTION foo.bar.baz() AS (\"uwu\"); SELECT baz();";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("x").build()).addValue("uwu").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore(
"Qualified paths can't be resolved due to a bug in ZetaSQL: "
+ "https:
public void testQualifiedNameUdfQualifiedCallThrowsException() {
String sql = "CREATE FUNCTION foo.bar.baz() AS (\"uwu\"); SELECT foo.bar.baz();";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("x").build()).addValue("uwu").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testUnaryUdf() {
String sql = "CREATE FUNCTION triple(x INT64) AS (3 * x); SELECT triple(triple(1));";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("x").build()).addValue(9L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testUdfWithinUdf() {
String sql =
"CREATE FUNCTION triple(x INT64) AS (3 * x);"
+ " CREATE FUNCTION nonuple(x INT64) as (triple(triple(x)));"
+ " SELECT nonuple(1);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("x").build()).addValue(9L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testUndefinedUdfThrowsException() {
String sql =
"CREATE FUNCTION foo() AS (bar()); "
+ "CREATE FUNCTION bar() AS (foo()); "
+ "SELECT foo();";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(SqlException.class);
thrown.expectMessage("Function not found: bar");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testRecursiveUdfThrowsException() {
String sql = "CREATE FUNCTION omega() AS (omega()); SELECT omega();";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(SqlException.class);
thrown.expectMessage("Function not found: omega");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testTimestampLiteralWithNonUTCTimeZone() {
String sql = "SELECT TIMESTAMP '2018-12-10 10:38:59-10:00'";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addDateTimeField("f_timestamp_with_time_zone").build())
.addValues(parseTimestampWithTimeZone("2018-12-10 10:38:59-1000"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testExtractTimestamp() {
String sql =
"WITH Timestamps AS (\n"
+ " SELECT TIMESTAMP '2007-12-31 12:34:56' AS timestamp UNION ALL\n"
+ " SELECT TIMESTAMP '2009-12-31'\n"
+ ")\n"
+ "SELECT\n"
+ " EXTRACT(ISOYEAR FROM timestamp) AS isoyear,\n"
+ " EXTRACT(YEAR FROM timestamp) AS year,\n"
+ " EXTRACT(ISOWEEK FROM timestamp) AS isoweek,\n"
+ " EXTRACT(MINUTE FROM timestamp) AS minute\n"
+ "FROM Timestamps\n";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addField("isoyear", FieldType.INT64)
.addField("year", FieldType.INT64)
.addField("isoweek", FieldType.INT64)
.addField("minute", FieldType.INT64)
.build();
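// 2007-12-31 falls in ISO year 2008, ISO week 1; 2009-12-31 falls in ISO week 53 of 2009.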
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(2008L, 2007L, 1L /* , 53L */, 34L).build(),
Row.withSchema(schema).addValues(2009L, 2009L, 53L /* , 52L */, 0L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testExtractTimestampAtTimeZoneUnsupported() {
String sql =
"WITH Timestamps AS (\n"
+ " SELECT TIMESTAMP '2017-05-26' AS timestamp\n"
+ ")\n"
+ "SELECT\n"
+ " timestamp,\n"
+ " EXTRACT(HOUR FROM timestamp AT TIME ZONE 'America/Vancouver') AS hour,\n"
+ " EXTRACT(DAY FROM timestamp AT TIME ZONE 'America/Vancouver') AS day\n"
+ "FROM Timestamps\n";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testExtractDateFromTimestampUnsupported() {
String sql =
"WITH Timestamps AS (\n"
+ " SELECT TIMESTAMP '2017-05-26' AS ts\n"
+ ")\n"
+ "SELECT\n"
+ " ts,\n"
+ " EXTRACT(DATE FROM ts) AS dt\n"
+ "FROM Timestamps\n";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(SqlException.class);
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testStringFromTimestamp() {
String sql = "SELECT STRING(TIMESTAMP '2008-12-25 15:30:00', 'America/Los_Angeles')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("f_timestamp_string").build())
.addValues("2008-12-25 07:30:00-08")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampFromString() {
String sql = "SELECT TIMESTAMP('2008-12-25 15:30:00', 'America/Los_Angeles')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addDateTimeField("f_timestamp").build())
.addValues(parseTimestampWithTimeZone("2008-12-25 15:30:00-08"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampAdd() {
String sql =
"SELECT "
+ "TIMESTAMP_ADD(TIMESTAMP '2008-12-25 15:30:00 UTC', INTERVAL 5+5 MINUTE), "
+ "TIMESTAMP_ADD(TIMESTAMP '2008-12-25 15:30:00+07:30', INTERVAL 10 MINUTE)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addDateTimeField("f_timestamp_add")
.addDateTimeField("f_timestamp_with_time_zone_add")
.build())
.addValues(
DateTimeUtils.parseTimestampWithUTCTimeZone("2008-12-25 15:40:00"),
parseTimestampWithTimeZone("2008-12-25 15:40:00+0730"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampSub() {
String sql =
"SELECT "
+ "TIMESTAMP_SUB(TIMESTAMP '2008-12-25 15:30:00 UTC', INTERVAL 5+5 MINUTE), "
+ "TIMESTAMP_SUB(TIMESTAMP '2008-12-25 15:30:00+07:30', INTERVAL 10 MINUTE)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addDateTimeField("f_timestamp_sub")
.addDateTimeField("f_timestamp_with_time_zone_sub")
.build())
.addValues(
DateTimeUtils.parseTimestampWithUTCTimeZone("2008-12-25 15:20:00"),
parseTimestampWithTimeZone("2008-12-25 15:20:00+0730"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampDiff() {
String sql =
"SELECT TIMESTAMP_DIFF("
+ "TIMESTAMP '2018-10-14 15:30:00.000 UTC', "
+ "TIMESTAMP '2018-08-14 15:05:00.001 UTC', "
+ "MILLISECOND)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("f_timestamp_diff").build())
.addValues((61L * 24 * 60 + 25) * 60 * 1000 - 1)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampDiffNegativeResult() {
String sql = "SELECT TIMESTAMP_DIFF(TIMESTAMP '2018-08-14', TIMESTAMP '2018-10-14', DAY)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("f_timestamp_diff").build())
.addValues(-61L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampTrunc() {
String sql = "SELECT TIMESTAMP_TRUNC(TIMESTAMP '2017-11-06 00:00:00+12', ISOWEEK, 'UTC')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addDateTimeField("f_timestamp_trunc").build())
.addValues(DateTimeUtils.parseTimestampWithUTCTimeZone("2017-10-30 00:00:00"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testFormatTimestamp() {
String sql = "SELECT FORMAT_TIMESTAMP('%D %T', TIMESTAMP '2018-10-14 15:30:00.123+00', 'UTC')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("f_timestamp_str").build())
.addValues("10/14/18 15:30:00")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testParseTimestamp() {
String sql = "SELECT PARSE_TIMESTAMP('%m-%d-%y %T', '10-14-18 15:30:00', 'UTC')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addDateTimeField("f_timestamp").build())
.addValues(DateTimeUtils.parseTimestampWithUTCTimeZone("2018-10-14 15:30:00"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampFromInt64() {
String sql = "SELECT TIMESTAMP_SECONDS(1230219000), TIMESTAMP_MILLIS(1230219000123) ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addDateTimeField("f_timestamp_seconds")
.addDateTimeField("f_timestamp_millis")
.build())
.addValues(
DateTimeUtils.parseTimestampWithUTCTimeZone("2008-12-25 15:30:00"),
DateTimeUtils.parseTimestampWithUTCTimeZone("2008-12-25 15:30:00.123"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampToUnixInt64() {
String sql =
"SELECT "
+ "UNIX_SECONDS(TIMESTAMP '2008-12-25 15:30:00 UTC'), "
+ "UNIX_MILLIS(TIMESTAMP '2008-12-25 15:30:00.123 UTC')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addInt64Field("f_unix_seconds")
.addInt64Field("f_unix_millis")
.build())
.addValues(1230219000L, 1230219000123L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampFromUnixInt64() {
String sql =
"SELECT "
+ "TIMESTAMP_FROM_UNIX_SECONDS(1230219000), "
+ "TIMESTAMP_FROM_UNIX_MILLIS(1230219000123) ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addDateTimeField("f_timestamp_seconds")
.addDateTimeField("f_timestamp_millis")
.build())
.addValues(
DateTimeUtils.parseTimestampWithUTCTimeZone("2008-12-25 15:30:00"),
DateTimeUtils.parseTimestampWithUTCTimeZone("2008-12-25 15:30:00.123"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDistinct() {
String sql = "SELECT DISTINCT Key2 FROM aggregate_test_table";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addInt64Field("Key2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(10L).build(),
Row.withSchema(schema).addValues(11L).build(),
Row.withSchema(schema).addValues(12L).build(),
Row.withSchema(schema).addValues(13L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDistinctOnNull() {
String sql = "SELECT DISTINCT str_val FROM all_null_table";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addNullableField("str_val", FieldType.DOUBLE).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testAnyValue() {
String sql = "SELECT ANY_VALUE(double_val) FROM all_null_table";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addNullableField("double_val", FieldType.DOUBLE).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectNULL() {
String sql = "SELECT NULL";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addNullableField("long_val", FieldType.INT64).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testWithQueryOne() {
String sql =
"With T1 AS (SELECT * FROM KeyValue), T2 AS (SELECT * FROM BigTable) SELECT T2.RowKey FROM"
+ " T1 INNER JOIN T2 on T1.Key = T2.RowKey;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("field1").build())
.addValues(15L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testWithQueryTwo() {
String sql =
"WITH T1 AS (SELECT Key, COUNT(*) as value FROM KeyValue GROUP BY Key) SELECT T1.Key,"
+ " T1.value FROM T1";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, 1L).build(),
Row.withSchema(schema).addValues(15L, 1L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testWithQueryThree() {
String sql =
"WITH T1 as (SELECT Value, Key FROM KeyValue WHERE Key = 14 OR Key = 15) SELECT T1.Value,"
+ " T1.Key FROM T1;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("KeyValue234", 14L).build(),
Row.withSchema(schema).addValues("KeyValue235", 15L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testWithQueryFour() {
String sql =
"WITH T1 as (SELECT Value, Key FROM KeyValue) SELECT T1.Value, T1.Key FROM T1 WHERE T1.Key"
+ " = 14 OR T1.Key = 15;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field2").addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("KeyValue234", 14L).build(),
Row.withSchema(schema).addValues("KeyValue235", 15L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testWithQueryFive() {
String sql =
"WITH T1 AS (SELECT * FROM KeyValue) SELECT T1.Key, COUNT(*) FROM T1 GROUP BY T1.Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, 1L).build(),
Row.withSchema(schema).addValues(15L, 1L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testWithQuerySix() {
String sql =
"WITH T1 AS (SELECT * FROM window_test_table_two) SELECT "
+ "COUNT(*) as field_count, "
+ "SESSION_START(\"INTERVAL 3 SECOND\") as window_start, "
+ "SESSION_END(\"INTERVAL 3 SECOND\") as window_end "
+ "FROM T1 "
+ "GROUP BY SESSION(ts, \"INTERVAL 3 SECOND\");";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("count_star")
.addDateTimeField("field1")
.addDateTimeField("field2")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
2L,
new DateTime(2018, 7, 1, 21, 26, 12, ISOChronology.getInstanceUTC()),
new DateTime(2018, 7, 1, 21, 26, 12, ISOChronology.getInstanceUTC()))
.build(),
Row.withSchema(schema)
.addValues(
2L,
new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()),
new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testUNNESTLiteral() {
String sql = "SELECT * FROM UNNEST(ARRAY<STRING>['foo', 'bar']);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addStringField("str_field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("foo").build(),
Row.withSchema(schema).addValues("bar").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testUNNESTParameters() {
String sql = "SELECT * FROM UNNEST(@p0);";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createArrayValue(
TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_STRING)),
ImmutableList.of(Value.createStringValue("foo"), Value.createStringValue("bar"))));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addStringField("str_field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("foo").build(),
Row.withSchema(schema).addValues("bar").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("BEAM-9515")
public void testUNNESTExpression() {
String sql = "SELECT * FROM UNNEST(ARRAY(SELECT Value FROM KeyValue));";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addStringField("str_field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("KeyValue234").build(),
Row.withSchema(schema).addValues("KeyValue235").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testNamedUNNESTLiteral() {
String sql = "SELECT *, T1 FROM UNNEST(ARRAY<STRING>['foo', 'bar']) AS T1";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema =
Schema.builder().addStringField("str_field").addStringField("str2_field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("foo", "foo").build(),
Row.withSchema(schema).addValues("bar", "bar").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testNamedUNNESTLiteralOffset() {
String sql = "SELECT x, p FROM UNNEST([3, 4]) AS x WITH OFFSET p";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
thrown.expect(UnsupportedOperationException.class);
BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
}
@Test
public void testUnnestArrayColumn() {
String sql =
"SELECT p FROM table_with_array_for_unnest, UNNEST(table_with_array_for_unnest.int_array_col) as p";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addInt64Field("int_field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue(14L).build(),
Row.withSchema(schema).addValue(18L).build(),
Row.withSchema(schema).addValue(22L).build(),
Row.withSchema(schema).addValue(24L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testStringAggregation() {
String sql =
"SELECT STRING_AGG(fruit) AS string_agg"
+ " FROM UNNEST([\"apple\", \"pear\", \"banana\", \"pear\"]) AS fruit";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addStringField("string_field").build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValue("apple,pear,banana,pear").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("Seeing exception in Beam, need further investigation on the cause of this failed query.")
public void testNamedUNNESTJoin() {
String sql =
"SELECT * "
+ "FROM table_with_array_for_unnest AS t1"
+ " LEFT JOIN UNNEST(t1.int_array_col) AS t2"
+ " on "
+ " t1.int_col = t2";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream).containsInAnyOrder();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testUnnestJoinStruct() {
String sql =
"SELECT b, x FROM UNNEST("
+ "[STRUCT(true AS b, [3, 5] AS arr), STRUCT(false AS b, [7, 9] AS arr)]) t "
+ "LEFT JOIN UNNEST(t.arr) x ON b";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testUnnestJoinLiteral() {
String sql =
"SELECT a, b "
+ "FROM UNNEST([1, 1, 2, 3, 5, 8, 13, NULL]) a "
+ "JOIN UNNEST([1, 2, 3, 5, 7, 11, 13, NULL]) b "
+ "ON a = b";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testUnnestJoinSubquery() {
String sql =
"SELECT a, b "
+ "FROM UNNEST([1, 2, 3]) a "
+ "JOIN UNNEST(ARRAY(SELECT b FROM UNNEST([3, 2, 1]) b)) b "
+ "ON a = b";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testCaseNoValue() {
String sql = "SELECT CASE WHEN 1 > 2 THEN 'not possible' ELSE 'seems right' END";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("str_field").build())
.addValue("seems right")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCaseWithValue() {
String sql = "SELECT CASE 1 WHEN 2 THEN 'not possible' ELSE 'seems right' END";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("str_field").build())
.addValue("seems right")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCaseWithValueMultipleCases() {
String sql =
"SELECT CASE 2 WHEN 1 THEN 'not possible' WHEN 2 THEN 'seems right' ELSE 'also not"
+ " possible' END";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("str_field").build())
.addValue("seems right")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCaseWithValueNoElse() {
String sql = "SELECT CASE 2 WHEN 1 THEN 'not possible' WHEN 2 THEN 'seems right' END";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("str_field").build())
.addValue("seems right")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCaseNoValueNoElseNoMatch() {
String sql = "SELECT CASE WHEN 'abc' = '123' THEN 'not possible' END";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addNullableField("str_field", FieldType.STRING).build())
.addValue(null)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCaseWithValueNoElseNoMatch() {
String sql = "SELECT CASE 2 WHEN 1 THEN 'not possible' END";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addNullableField("str_field", FieldType.STRING).build())
.addValue(null)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCastToDateWithCase() {
String sql =
"SELECT f_int, \n"
+ "CASE WHEN CHAR_LENGTH(TRIM(f_string)) = 8 \n"
+ " THEN CAST (CONCAT(\n"
+ " SUBSTR(TRIM(f_string), 1, 4) \n"
+ " , '-' \n"
+ " , SUBSTR(TRIM(f_string), 5, 2) \n"
+ " , '-' \n"
+ " , SUBSTR(TRIM(f_string), 7, 2)) AS DATE)\n"
+ " ELSE NULL\n"
+ "END \n"
+ "FROM table_for_case_when";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema resultType =
Schema.builder()
.addInt64Field("f_long")
.addNullableField("f_date", FieldType.logicalType(SqlTypes.DATE))
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(resultType).addValues(1L, LocalDate.parse("2018-10-18")).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIntersectAll() {
String sql =
"SELECT Key FROM aggregate_test_table "
+ "INTERSECT ALL "
+ "SELECT Key FROM aggregate_test_table_two";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema resultType = Schema.builder().addInt64Field("field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(resultType).addValues(1L).build(),
Row.withSchema(resultType).addValues(2L).build(),
Row.withSchema(resultType).addValues(2L).build(),
Row.withSchema(resultType).addValues(2L).build(),
Row.withSchema(resultType).addValues(3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIntersectDistinct() {
String sql =
"SELECT Key FROM aggregate_test_table "
+ "INTERSECT DISTINCT "
+ "SELECT Key FROM aggregate_test_table_two";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema resultType = Schema.builder().addInt64Field("field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(resultType).addValues(1L).build(),
Row.withSchema(resultType).addValues(2L).build(),
Row.withSchema(resultType).addValues(3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testExceptAll() {
String sql =
"SELECT Key FROM aggregate_test_table "
+ "EXCEPT ALL "
+ "SELECT Key FROM aggregate_test_table_two";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema resultType = Schema.builder().addInt64Field("field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(resultType).addValues(1L).build(),
Row.withSchema(resultType).addValues(3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectFromEmptyTable() {
String sql = "SELECT * FROM table_empty;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream).containsInAnyOrder();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testStartsWithString() {
String sql = "SELECT STARTS_WITH('string1', 'stri')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testStartsWithString2() {
String sql = "SELECT STARTS_WITH(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
.put("p1", Value.createStringValue(""))
.build();
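// A NULL argument propagates: the result is expected to be NULL.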
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testStartsWithString3() {
String sql = "SELECT STARTS_WITH(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
.put("p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEndsWithString() {
String sql = "SELECT STARTS_WITH('string1', 'ng0')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEndsWithString2() {
String sql = "SELECT STARTS_WITH(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
.put("p1", Value.createStringValue(""))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEndsWithString3() {
String sql = "SELECT STARTS_WITH(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
.put("p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("Does not support DateTime literal.")
public void testDateTimeLiteral() {
String sql = "SELECT DATETIME '2018-01-01 05:30:00.334'";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(RuntimeException.class);
thrown.expectMessage("Unsupported ResolvedLiteral type: DATETIME");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testConcatWithOneParameters() {
String sql = "SELECT concat('abc')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatWithTwoParameters() {
String sql = "SELECT concat('abc', 'def')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abcdef").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatWithThreeParameters() {
String sql = "SELECT concat('abc', 'def', 'xyz')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abcdefxyz").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatWithFourParameters() {
String sql = "SELECT concat('abc', 'def', ' ', 'xyz')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues("abcdef xyz").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatWithFiveParameters() {
String sql = "SELECT concat('abc', 'def', ' ', 'xyz', 'kkk')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues("abcdef xyzkkk").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatWithSixParameters() {
String sql = "SELECT concat('abc', 'def', ' ', 'xyz', 'kkk', 'ttt')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues("abcdef xyzkkkttt").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatWithNull1() {
String sql = "SELECT CONCAT(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createStringValue(""),
"p1",
Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatWithNull2() {
String sql = "SELECT CONCAT(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p1",
Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testNamedParameterQuery() {
String sql = "SELECT @ColA AS ColA";
ImmutableMap<String, Value> params = ImmutableMap.of("ColA", Value.createInt64Value(5));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(5L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testArrayStructLiteral() {
String sql = "SELECT ARRAY<STRUCT<INT64, INT64>>[(11, 12)];";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema innerSchema =
Schema.of(Field.of("s", FieldType.INT64), Field.of("i", FieldType.INT64));
final Schema schema =
Schema.of(Field.of("field1", FieldType.array(FieldType.row(innerSchema))));
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValue(ImmutableList.of(Row.withSchema(innerSchema).addValues(11L, 12L).build()))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testParameterStruct() {
String sql = "SELECT @p as ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p",
Value.createStructValue(
TypeFactory.createStructType(
ImmutableList.of(
new StructType.StructField(
"s", TypeFactory.createSimpleType(TypeKind.TYPE_STRING)),
new StructType.StructField(
"i", TypeFactory.createSimpleType(TypeKind.TYPE_INT64)))),
ImmutableList.of(Value.createStringValue("foo"), Value.createInt64Value(1L))));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema innerSchema =
Schema.of(Field.of("s", FieldType.STRING), Field.of("i", FieldType.INT64));
final Schema schema = Schema.of(Field.of("field1", FieldType.row(innerSchema)));
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValue(Row.withSchema(innerSchema).addValues("foo", 1L).build())
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testParameterStructNested() {
String sql = "SELECT @outer_struct.inner_struct.s as ColA";
StructType innerStructType =
TypeFactory.createStructType(
ImmutableList.of(
new StructType.StructField(
"s", TypeFactory.createSimpleType(TypeKind.TYPE_STRING))));
ImmutableMap<String, Value> params =
ImmutableMap.of(
"outer_struct",
Value.createStructValue(
TypeFactory.createStructType(
ImmutableList.of(new StructType.StructField("inner_struct", innerStructType))),
ImmutableList.of(
Value.createStructValue(
innerStructType, ImmutableList.of(Value.createStringValue("foo"))))));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue("foo").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatNamedParameterQuery() {
String sql = "SELECT CONCAT(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createStringValue(""), "p1", Value.createStringValue("A"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("A").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatPositionalParameterQuery() {
String sql = "SELECT CONCAT(?, ?, ?) AS ColA";
ImmutableList<Value> params =
ImmutableList.of(
Value.createStringValue("a"),
Value.createStringValue("b"),
Value.createStringValue("c"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testReplace1() {
String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue(""),
"p1", Value.createStringValue(""),
"p2", Value.createStringValue("a"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testReplace2() {
String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("abc"),
"p1", Value.createStringValue(""),
"p2", Value.createStringValue("xyz"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testReplace3() {
String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue(""),
"p1", Value.createStringValue(""),
"p2", Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testReplace4() {
String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p2", Value.createStringValue(""));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTrim1() {
String sql = "SELECT trim(@p0)";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createStringValue(" a b c "));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("a b c").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTrim2() {
String sql = "SELECT trim(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("abxyzab"), "p1", Value.createStringValue("ab"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("xyz").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTrim3() {
String sql = "SELECT trim(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testLTrim1() {
String sql = "SELECT ltrim(@p0)";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createStringValue(" a b c "));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("a b c ").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testLTrim2() {
String sql = "SELECT ltrim(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("abxyzab"), "p1", Value.createStringValue("ab"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("xyzab").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testLTrim3() {
String sql = "SELECT ltrim(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testRTrim1() {
String sql = "SELECT rtrim(@p0)";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createStringValue(" a b c "));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(" a b c").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testRTrim2() {
String sql = "SELECT rtrim(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("abxyzab"), "p1", Value.createStringValue("ab"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abxyz").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testRTrim3() {
String sql = "SELECT rtrim(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("https:
public void testCastBytesToString1() {
String sql = "SELECT CAST(@p0 AS STRING)";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createBytesValue(ByteString.copyFromUtf8("`")));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("`").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCastBytesToString2() {
String sql = "SELECT CAST(b'b' AS STRING)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("b").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("https:
public void testCastBytesToStringFromTable() {
String sql = "SELECT CAST(bytes_col AS STRING) FROM table_all_types";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("1").build(),
Row.withSchema(schema).addValues("2").build(),
Row.withSchema(schema).addValues("3").build(),
Row.withSchema(schema).addValues("4").build(),
Row.withSchema(schema).addValues("5").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCastStringToTS() {
String sql = "SELECT CAST('2019-01-15 13:21:03' AS TIMESTAMP)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addDateTimeField("field_1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(parseTimestampWithUTCTimeZone("2019-01-15 13:21:03"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCastStringToString() {
String sql = "SELECT CAST(@p0 AS STRING)";
ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createStringValue(""));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCastStringToInt64() {
String sql = "SELECT CAST(@p0 AS INT64)";
ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createStringValue("123"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(123L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectConstant() {
String sql = "SELECT 'hi'";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("hi").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("Does not support DATE_ADD.")
public void testDateAddWithParameter() {
String sql =
"SELECT "
+ "DATE_ADD(@p0, INTERVAL @p1 DAY), "
+ "DATE_ADD(@p2, INTERVAL @p3 DAY), "
+ "DATE_ADD(@p4, INTERVAL @p5 YEAR), "
+ "DATE_ADD(@p6, INTERVAL @p7 DAY), "
+ "DATE_ADD(@p8, INTERVAL @p9 MONTH)";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createDateValue(0))
.put("p1", Value.createInt64Value(2L))
.put("p2", parseDateToValue("2019-01-01"))
.put("p3", Value.createInt64Value(2L))
.put("p4", Value.createSimpleNullValue(TypeKind.TYPE_DATE))
.put("p5", Value.createInt64Value(1L))
.put("p6", parseDateToValue("2000-02-29"))
.put("p7", Value.createInt64Value(-365L))
.put("p8", parseDateToValue("1999-03-31"))
.put("p9", Value.createInt64Value(-1L))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addDateTimeField("field1")
.addDateTimeField("field2")
.addNullableField("field3", DATETIME)
.addDateTimeField("field4")
.addDateTimeField("field5")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
parseDate("1970-01-03"),
parseDate("2019-01-03"),
null,
parseDate("1999-03-01"),
parseDate("1999-02-28"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("Does not support TIME_ADD.")
public void testTimeAddWithParameter() {
String sql = "SELECT TIME_ADD(@p0, INTERVAL @p1 SECOND)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", parseTimeToValue("12:13:14.123"),
"p1", Value.createInt64Value(1L));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addDateTimeField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues(parseTime("12:13:15.123")).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampAddWithParameter1() {
String sql = "SELECT TIMESTAMP_ADD(@p0, INTERVAL @p1 MILLISECOND)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", parseTimestampWithTZToValue("2001-01-01 00:00:00+00"),
"p1", Value.createInt64Value(1L));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addDateTimeField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(parseTimestampWithTimeZone("2001-01-01 00:00:00.001+00"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampAddWithParameter2() {
String sql = "SELECT TIMESTAMP_ADD(@p0, INTERVAL @p1 MINUTE)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", parseTimestampWithTZToValue("2008-12-25 15:30:00+07:30"),
"p1", Value.createInt64Value(10L));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addDateTimeField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(parseTimestampWithTimeZone("2008-12-25 15:40:00+07:30"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("[BEAM-8593] ZetaSQL does not support Map type")
public void testSelectFromTableWithMap() {
String sql = "SELECT row_field FROM table_with_map";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema rowSchema = Schema.builder().addInt64Field("row_id").addStringField("data").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addRowField("row_field", rowSchema).build())
.addValues(Row.withSchema(rowSchema).addValues(1L, "data1").build())
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSubQuery() {
String sql = "select sum(Key) from KeyValue\n" + "group by (select Key)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
thrown.expectMessage("Does not support sub-queries");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
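// Verifies that a negative SUBSTR position counts from the end of the string: substr('abc', -2, 1) yields "b".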
@Test
public void testSubstr() {
String sql = "SELECT substr(@p0, @p1, @p2)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("abc"),
"p1", Value.createInt64Value(-2L),
"p2", Value.createInt64Value(1L));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("b").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
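// Verifies that SUBSTR position/length values outside the signed 32-bit range pass planning but raise a RuntimeException when the pipeline executes.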
@Test
public void testSubstrWithLargeValueExpectException() {
String sql = "SELECT substr(@p0, @p1, @p2)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("abc"),
"p1", Value.createInt64Value(Integer.MAX_VALUE + 1L),
"p2", Value.createInt64Value(Integer.MIN_VALUE - 1L));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
thrown.expect(RuntimeException.class);
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectAll() {
String sql = "SELECT ALL Key, Value FROM KeyValue;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, "KeyValue234").build(),
Row.withSchema(schema).addValues(15L, "KeyValue235").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectDistinct() {
String sql = "SELECT DISTINCT Key FROM aggregate_test_table;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L).build(),
Row.withSchema(schema).addValues(2L).build(),
Row.withSchema(schema).addValues(3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
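// Verifies that DISTINCT over BYTES compares raw bytes, so values differing only in letter case remain distinct rows.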
@Test
public void testSelectDistinct2() {
String sql =
"SELECT DISTINCT val.BYTES\n"
+ "from (select b\"BYTES\" BYTES union all\n"
+ " select b\"bytes\" union all\n"
+ " select b\"ByTeS\") val";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addByteArrayField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("BYTES".getBytes(StandardCharsets.UTF_8)).build(),
Row.withSchema(schema).addValues("ByTeS".getBytes(StandardCharsets.UTF_8)).build(),
Row.withSchema(schema).addValues("bytes".getBytes(StandardCharsets.UTF_8)).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectBytes() {
String sql = "SELECT b\"ByTes\"";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addByteArrayField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("ByTes".getBytes(StandardCharsets.UTF_8)).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
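// Verifies that SELECT * EXCEPT drops the listed columns (Key, ts) from the projection.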
@Test
public void testSelectExcept() {
String sql = "SELECT * EXCEPT (Key, ts) FROM KeyValue;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("KeyValue234").build(),
Row.withSchema(schema).addValues("KeyValue235").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
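// Verifies that SELECT * REPLACE keeps the original column list but substitutes the given expression for the named column.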
@Test
public void testSelectReplace() {
String sql =
"WITH orders AS\n"
+ " (SELECT 5 as order_id,\n"
+ " \"sprocket\" as item_name,\n"
+ " 200 as quantity)\n"
+ "SELECT * REPLACE (\"widget\" AS item_name)\n"
+ "FROM orders";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addInt64Field("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues(5L, "widget", 200L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testUnionAllBasic() {
String sql =
"SELECT row_id FROM table_all_types UNION ALL SELECT row_id FROM table_all_types_2";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue(1L).build(),
Row.withSchema(schema).addValue(2L).build(),
Row.withSchema(schema).addValue(3L).build(),
Row.withSchema(schema).addValue(4L).build(),
Row.withSchema(schema).addValue(5L).build(),
Row.withSchema(schema).addValue(6L).build(),
Row.withSchema(schema).addValue(7L).build(),
Row.withSchema(schema).addValue(8L).build(),
Row.withSchema(schema).addValue(9L).build(),
Row.withSchema(schema).addValue(10L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testAVGWithLongInput() {
String sql = "SELECT AVG(f_int_1) FROM aggregate_test_table;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(RuntimeException.class);
thrown.expectMessage(
"AVG(LONG) is not supported. You might want to use AVG(CAST(expression AS DOUBLE).");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testReverseString() {
String sql = "SELECT REVERSE('abc');";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field2").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("cba").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCharLength() {
String sql = "SELECT CHAR_LENGTH('abc');";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCharLengthNull() {
String sql = "SELECT CHAR_LENGTH(@p0);";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field", FieldType.INT64).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
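// Verifies TUMBLE as a table-valued function: the input is windowed on the ts column and window_start/window_end columns are appended for one-second windows.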
@Test
public void testTumbleAsTVF() {
String sql =
"select Key, Value, ts, window_start, window_end from "
+ "TUMBLE((select * from KeyValue), descriptor(ts), 'INTERVAL 1 SECOND')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
ImmutableMap<String, Value> params = ImmutableMap.of();
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("Key")
.addStringField("Value")
.addDateTimeField("ts")
.addDateTimeField("window_start")
.addDateTimeField("window_end")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
14L,
"KeyValue234",
DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:06"),
DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:06"),
DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:07"))
.build(),
Row.withSchema(schema)
.addValues(
15L,
"KeyValue235",
DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:07"),
DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:07"),
DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01T21:26:08"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIsNullTrueFalse() {
String sql =
"WITH Src AS (\n"
+ " SELECT NULL as data UNION ALL\n"
+ " SELECT TRUE UNION ALL\n"
+ " SELECT FALSE\n"
+ ")\n"
+ "SELECT\n"
+ " data IS NULL as isnull,\n"
+ " data IS NOT NULL as isnotnull,\n"
+ " data IS TRUE as istrue,\n"
+ " data IS NOT TRUE as isnottrue,\n"
+ " data IS FALSE as isfalse,\n"
+ " data IS NOT FALSE as isnotfalse\n"
+ "FROM Src\n";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
ImmutableMap<String, Value> params = ImmutableMap.of();
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addField("isnull", FieldType.BOOLEAN)
.addField("isnotnull", FieldType.BOOLEAN)
.addField("istrue", FieldType.BOOLEAN)
.addField("isnottrue", FieldType.BOOLEAN)
.addField("isfalse", FieldType.BOOLEAN)
.addField("isnotfalse", FieldType.BOOLEAN)
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(true, false, false, true, false, true).build(),
Row.withSchema(schema).addValues(false, true, true, false, false, true).build(),
Row.withSchema(schema).addValues(false, true, false, true, true, false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBitOr() {
String sql = "SELECT BIT_OR(row_id) FROM table_all_types GROUP BY bool_col";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(3L).build(),
Row.withSchema(schema).addValue(7L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("NULL values don't work correctly. (https:
public void testZetaSQLBitAnd() {
String sql = "SELECT BIT_AND(row_id) FROM table_all_types GROUP BY bool_col";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue(1L).build(),
Row.withSchema(schema).addValue(0L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSimpleTableName() {
String sql = "SELECT Key FROM KeyValue";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema singleField = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(singleField).addValues(14L).build(),
Row.withSchema(singleField).addValues(15L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
}
class ZetaSQLDialectSpecTest extends ZetaSQLTestBase {
@Rule public transient TestPipeline pipeline = TestPipeline.create();
@Rule public ExpectedException thrown = ExpectedException.none();
@Before
public void setUp() {
initializeBeamTableProvider();
initializeCalciteEnvironment();
}
@Test
public void testSimpleSelect() {
String sql =
"SELECT CAST (1243 as INT64), "
+ "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
+ "CAST ('string' as STRING);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addDateTimeField("field2")
.addStringField("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
1243L,
new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
"string")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testWithQueryPlannerClass() {
String sql =
"SELECT CAST (1243 as INT64), "
+ "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
+ "CAST ('string' as STRING);";
PCollection<Row> stream =
pipeline.apply(SqlTransform.query(sql).withQueryPlannerClass(ZetaSQLQueryPlanner.class));
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addDateTimeField("field2")
.addStringField("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
1243L,
new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
"string")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testPlannerNamePipelineOption() {
pipeline
.getOptions()
.as(BeamSqlPipelineOptions.class)
.setPlannerName("org.apache.beam.sdk.extensions.sql.zetasql.ZetaSQLQueryPlanner");
String sql =
"SELECT CAST (1243 as INT64), "
+ "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
+ "CAST ('string' as STRING);";
PCollection<Row> stream = pipeline.apply(SqlTransform.query(sql));
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addDateTimeField("field2")
.addStringField("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
1243L,
new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
"string")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testByteLiterals() {
String sql = "SELECT b'abc'";
byte[] byteString = new byte[] {'a', 'b', 'c'};
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("ColA", FieldType.BYTES).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(byteString).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testByteString() {
String sql = "SELECT @p0 IS NULL AS ColA";
ByteString byteString = ByteString.copyFrom(new byte[] {0x62});
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder().put("p0", Value.createBytesValue(byteString)).build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("ColA", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testFloat() {
String sql = "SELECT 3.0";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("ColA", FieldType.DOUBLE).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(3.0).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testStringLiterals() {
String sql = "SELECT '\"America/Los_Angeles\"\\n'";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("ColA", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues("\"America/Los_Angeles\"\n").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
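// Verifies that positional parameters ('?') are bound in order from an ordered list rather than a named map.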
@Test
public void testParameterString() {
String sql = "SELECT ?";
ImmutableList<Value> params = ImmutableList.of(Value.createStringValue("abc\n"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("ColA", FieldType.STRING).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc\n").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel")
public void testEQ1() {
String sql = "SELECT @p0 = @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createSimpleNullValue(TypeKind.TYPE_BOOL))
.put("p1", Value.createBoolValue(true))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore(
"Does not support inf/-inf/nan in double/float literals because double/float literals are"
+ " converted to BigDecimal in Calcite codegen.")
public void testEQ2() {
String sql = "SELECT @p0 = @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createDoubleValue(0))
.put("p1", Value.createDoubleValue(Double.POSITIVE_INFINITY))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addBooleanField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel")
public void testEQ3() {
String sql = "SELECT @p0 = @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createSimpleNullValue(TypeKind.TYPE_DOUBLE))
.put("p1", Value.createDoubleValue(3.14))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEQ4() {
String sql = "SELECT @p0 = @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createBytesValue(ByteString.copyFromUtf8("hello")))
.put("p1", Value.createBytesValue(ByteString.copyFromUtf8("hello")))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEQ5() {
String sql = "SELECT b'hello' = b'hello' AS ColA";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEQ6() {
String sql = "SELECT ? = ? AS ColA";
ImmutableList<Value> params =
ImmutableList.of(Value.createInt64Value(4L), Value.createInt64Value(5L));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIsNotNull1() {
String sql = "SELECT @p0 IS NOT NULL AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIsNotNull2() {
String sql = "SELECT @p0 IS NOT NULL AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createNullValue(
TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64))));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIsNotNull3() {
String sql = "SELECT @p0 IS NOT NULL AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createNullValue(
TypeFactory.createStructType(
Arrays.asList(
new StructField(
"a", TypeFactory.createSimpleType(TypeKind.TYPE_STRING))))));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIfBasic() {
String sql = "SELECT IF(@p0, @p1, @p2) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createBoolValue(true),
"p1",
Value.createInt64Value(1),
"p2",
Value.createInt64Value(2));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.INT64).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(1L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIfPositional() {
String sql = "SELECT IF(?, ?, ?) AS ColA";
ImmutableList<Value> params =
ImmutableList.of(
Value.createBoolValue(true), Value.createInt64Value(1), Value.createInt64Value(2));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.INT64).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(1L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCoalesceBasic() {
String sql = "SELECT COALESCE(@p0, @p1, @p2) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p1",
Value.createStringValue("yay"),
"p2",
Value.createStringValue("nay"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("yay").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCoalesceSingleArgument() {
String sql = "SELECT COALESCE(@p0) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createSimpleNullValue(TypeKind.TYPE_INT64));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder().addNullableField("field1", FieldType.array(FieldType.INT64)).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCoalesceNullArray() {
String sql = "SELECT COALESCE(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createNullValue(
TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64))),
"p1",
Value.createNullValue(
TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64))));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder().addNullableField("field1", FieldType.array(FieldType.INT64)).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel")
public void testNullIfCoercion() {
String sql = "SELECT NULLIF(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createInt64Value(3L),
"p1",
Value.createSimpleNullValue(TypeKind.TYPE_DOUBLE));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.DOUBLE).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(3.0).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCoalesceNullStruct() {
String sql = "SELECT COALESCE(NULL, STRUCT(\"a\" AS s, -33 AS i))";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema innerSchema =
Schema.of(Field.of("s", FieldType.STRING), Field.of("i", FieldType.INT64));
final Schema schema =
Schema.builder().addNullableField("field1", FieldType.row(innerSchema)).build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValue(Row.withSchema(innerSchema).addValues("a", -33L).build())
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIfTimestamp() {
String sql = "SELECT IF(@p0, @p1, @p2) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createBoolValue(false),
"p1",
Value.createTimestampValueFromUnixMicros(0),
"p2",
Value.createTimestampValueFromUnixMicros(
DateTime.parse("2019-01-01T00:00:00Z").getMillis() * 1000));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", DATETIME).build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(DateTime.parse("2019-01-01T00:00:00Z")).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("$make_array is not implemented")
public void testMakeArray() {
String sql = "SELECT [s3, s1, s2] FROM (SELECT \"foo\" AS s1, \"bar\" AS s2, \"baz\" AS s3);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder().addNullableField("field1", FieldType.array(FieldType.STRING)).build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue(ImmutableList.of("baz", "foo", "bar")).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
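// The next two tests verify that NULLIF returns NULL when both arguments are equal and the first argument otherwise.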
@Test
public void testNullIfPositive() {
String sql = "SELECT NULLIF(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("null"), "p1", Value.createStringValue("null"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testNullIfNegative() {
String sql = "SELECT NULLIF(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("foo"), "p1", Value.createStringValue("null"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("foo").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
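// The next two tests verify that IFNULL returns the first argument when it is non-NULL and the second argument otherwise.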
@Test
public void testIfNullPositive() {
String sql = "SELECT IFNULL(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("foo"), "p1", Value.createStringValue("default"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("foo").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIfNullNegative() {
String sql = "SELECT IFNULL(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p1",
Value.createStringValue("yay"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("yay").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEmptyArrayParameter() {
String sql = "SELECT @p0 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createArrayValue(
TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_INT64)),
ImmutableList.of()));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addArrayField("field1", FieldType.INT64).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValue(ImmutableList.of()).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEmptyArrayLiteral() {
String sql = "SELECT ARRAY<STRING>[];";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addArrayField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValue(ImmutableList.of()).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
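// Verifies that a backslash-escaped '%' in a LIKE pattern matches a literal percent sign rather than acting as a wildcard.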
@Test
public void testLike1() {
String sql = "SELECT @p0 LIKE @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("ab%"), "p1", Value.createStringValue("ab\\%"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("[BEAM-9182] NULL parameters do not work in BeamZetaSqlCalcRel")
public void testLikeNullPattern() {
String sql = "SELECT @p0 LIKE @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createStringValue("ab%"),
"p1",
Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
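// Verifies that escaping a character with no special meaning is allowed: the pattern '\ab' matches the input 'ab'.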
@Test
public void testLikeAllowsEscapingNonSpecialCharacter() {
String sql = "SELECT @p0 LIKE @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createStringValue("ab"), "p1", Value.createStringValue("\\ab"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
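// Verifies that a doubled backslash in the pattern matches a single literal backslash in the input.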
@Test
public void testLikeAllowsEscapingBackslash() {
String sql = "SELECT @p0 LIKE @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("a\\c"), "p1", Value.createStringValue("a\\\\c"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
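// Verifies that LIKE also accepts BYTES operands: '_' matches exactly one byte and '%' matches the remaining bytes.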
@Test
public void testLikeBytes() {
String sql = "SELECT @p0 LIKE @p1 AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createBytesValue(ByteString.copyFromUtf8("abcd")),
"p1",
Value.createBytesValue(ByteString.copyFromUtf8("__%")));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testMod() {
String sql = "SELECT MOD(4, 2)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(0L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSimpleUnionAll() {
String sql =
"SELECT CAST (1243 as INT64), "
+ "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
+ "CAST ('string' as STRING) "
+ " UNION ALL "
+ " SELECT CAST (1243 as INT64), "
+ "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
+ "CAST ('string' as STRING);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addDateTimeField("field2")
.addStringField("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
1243L,
new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
"string")
.build(),
Row.withSchema(schema)
.addValues(
1243L,
new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
"string")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testThreeWayUnionAll() {
String sql = "SELECT a FROM (SELECT 1 a UNION ALL SELECT 2 UNION ALL SELECT 3)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L).build(),
Row.withSchema(schema).addValues(2L).build(),
Row.withSchema(schema).addValues(3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSimpleUnionDISTINCT() {
String sql =
"SELECT CAST (1243 as INT64), "
+ "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
+ "CAST ('string' as STRING) "
+ " UNION DISTINCT "
+ " SELECT CAST (1243 as INT64), "
+ "CAST ('2018-09-15 12:59:59.000000+00' as TIMESTAMP), "
+ "CAST ('string' as STRING);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addDateTimeField("field2")
.addStringField("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
1243L,
new DateTime(2018, 9, 15, 12, 59, 59, ISOChronology.getInstanceUTC()),
"string")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLInnerJoin() {
String sql =
"SELECT t1.Key "
+ "FROM KeyValue AS t1"
+ " INNER JOIN BigTable AS t2"
+ " on "
+ " t1.Key = t2.RowKey AND t1.ts = t2.ts";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("field1").build())
.addValues(15L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLInnerJoinWithUsing() {
String sql = "SELECT t1.Key " + "FROM KeyValue AS t1" + " INNER JOIN BigTable AS t2 USING(ts)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("field1").build())
.addValues(15L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLInnerJoinTwo() {
String sql =
"SELECT t2.RowKey "
+ "FROM KeyValue AS t1"
+ " INNER JOIN BigTable AS t2"
+ " on "
+ " t2.RowKey = t1.Key AND t2.ts = t1.ts";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("field1").build())
.addValues(15L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLLeftOuterJoin() {
String sql =
"SELECT * "
+ "FROM KeyValue AS t1"
+ " LEFT JOIN BigTable AS t2"
+ " on "
+ " t1.Key = t2.RowKey";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schemaOne =
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addDateTimeField("field3")
.addNullableField("field4", FieldType.INT64)
.addNullableField("field5", FieldType.STRING)
.addNullableField("field6", DATETIME)
.build();
final Schema schemaTwo =
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addDateTimeField("field3")
.addInt64Field("field4")
.addStringField("field5")
.addDateTimeField("field6")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schemaOne)
.addValues(
14L,
"KeyValue234",
new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()),
null,
null,
null)
.build(),
Row.withSchema(schemaTwo)
.addValues(
15L,
"KeyValue235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()),
15L,
"BigTable235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLRightOuterJoin() {
String sql =
"SELECT * "
+ "FROM KeyValue AS t1"
+ " RIGHT JOIN BigTable AS t2"
+ " on "
+ " t1.Key = t2.RowKey";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schemaOne =
Schema.builder()
.addNullableField("field1", FieldType.INT64)
.addNullableField("field2", FieldType.STRING)
.addNullableField("field3", DATETIME)
.addInt64Field("field4")
.addStringField("field5")
.addDateTimeField("field6")
.build();
final Schema schemaTwo =
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addDateTimeField("field3")
.addInt64Field("field4")
.addStringField("field5")
.addDateTimeField("field6")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schemaOne)
.addValues(
null,
null,
null,
16L,
"BigTable236",
new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC()))
.build(),
Row.withSchema(schemaTwo)
.addValues(
15L,
"KeyValue235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()),
15L,
"BigTable235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLFullOuterJoin() {
String sql =
"SELECT * "
+ "FROM KeyValue AS t1"
+ " FULL JOIN BigTable AS t2"
+ " on "
+ " t1.Key = t2.RowKey";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schemaOne =
Schema.builder()
.addNullableField("field1", FieldType.INT64)
.addNullableField("field2", FieldType.STRING)
.addNullableField("field3", DATETIME)
.addInt64Field("field4")
.addStringField("field5")
.addDateTimeField("field6")
.build();
final Schema schemaTwo =
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addDateTimeField("field3")
.addInt64Field("field4")
.addStringField("field5")
.addDateTimeField("field6")
.build();
final Schema schemaThree =
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addDateTimeField("field3")
.addNullableField("field4", FieldType.INT64)
.addNullableField("field5", FieldType.STRING)
.addNullableField("field6", DATETIME)
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schemaOne)
.addValues(
null,
null,
null,
16L,
"BigTable236",
new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC()))
.build(),
Row.withSchema(schemaTwo)
.addValues(
15L,
"KeyValue235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()),
15L,
"BigTable235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()))
.build(),
Row.withSchema(schemaThree)
.addValues(
14L,
"KeyValue234",
new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()),
null,
null,
null)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("BeamSQL only supports equi-joins")
public void testZetaSQLFullOuterJoinTwo() {
String sql =
"SELECT * "
+ "FROM KeyValue AS t1"
+ " FULL JOIN BigTable AS t2"
+ " on "
+ " t1.Key + t2.RowKey = 30";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
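// A constant-false join condition is not an equi-join, which Beam SQL requires, so converting
// the plan to a PCollection is expected to fail with UnsupportedOperationException.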
@Test
public void testZetaSQLFullOuterJoinFalse() {
String sql = "SELECT * FROM KeyValue AS t1 FULL JOIN BigTable AS t2 ON false";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
thrown.expect(UnsupportedOperationException.class);
BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
}
@Test
public void testZetaSQLThreeWayInnerJoin() {
String sql =
"SELECT t3.Value, t2.Value, t1.Value, t1.Key, t3.ColId FROM KeyValue as t1 "
+ "JOIN BigTable as t2 "
+ "ON (t1.Key = t2.RowKey) "
+ "JOIN Spanner as t3 "
+ "ON (t3.ColId = t1.Key)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addStringField("t3.Value")
.addStringField("t2.Value")
.addStringField("t1.Value")
.addInt64Field("t1.Key")
.addInt64Field("t3.ColId")
.build())
.addValues("Spanner235", "BigTable235", "KeyValue235", 15L, 15L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLTableJoinOnItselfWithFiltering() {
String sql =
"SELECT * FROM Spanner as t1 "
+ "JOIN Spanner as t2 "
+ "ON (t1.ColId = t2.ColId) WHERE t1.ColId = 17";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addInt64Field("field3")
.addStringField("field4")
.build())
.addValues(17L, "Spanner237", 17L, "Spanner237")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectFromSelect() {
String sql = "SELECT * FROM (SELECT \"apple\" AS fruit, \"carrot\" AS vegetable);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder().addStringField("field1").addStringField("field2").build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues("apple", "carrot").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
Schema outputSchema = stream.getSchema();
Assert.assertEquals(2, outputSchema.getFieldCount());
Assert.assertEquals("fruit", outputSchema.getField(0).getName());
Assert.assertEquals("vegetable", outputSchema.getField(1).getName());
}
@Test
public void testZetaSQLSelectFromTable() {
String sql = "SELECT Key, Value FROM KeyValue;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, "KeyValue234").build(),
Row.withSchema(schema).addValues(15L, "KeyValue235").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectFromTableLimit() {
String sql = "SELECT Key, Value FROM KeyValue LIMIT 2;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, "KeyValue234").build(),
Row.withSchema(schema).addValues(15L, "KeyValue235").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
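// LIMIT 0 should produce no rows; containsInAnyOrder() with no arguments asserts an empty output.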
@Test
public void testZetaSQLSelectFromTableLimit0() {
String sql = "SELECT Key, Value FROM KeyValue LIMIT 0;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream).containsInAnyOrder();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectNullLimitParam() {
String sql = "SELECT Key, Value FROM KeyValue LIMIT @lmt;";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"lmt", Value.createNullValue(TypeFactory.createSimpleType(TypeKind.TYPE_INT64)));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(RuntimeException.class);
thrown.expectMessage("Limit requires non-null count and offset");
    zetaSQLQueryPlanner.convertToBeamRel(sql, params);
}
@Test
public void testZetaSQLSelectNullOffsetParam() {
String sql = "SELECT Key, Value FROM KeyValue LIMIT 1 OFFSET @lmt;";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"lmt", Value.createNullValue(TypeFactory.createSimpleType(TypeKind.TYPE_INT64)));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(RuntimeException.class);
thrown.expectMessage("Limit requires non-null count and offset");
    zetaSQLQueryPlanner.convertToBeamRel(sql, params);
}
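// ORDER BY is supported when paired with LIMIT; ascending order on x keeps only the (0, 0) row.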
@Test
public void testZetaSQLSelectFromTableOrderLimit() {
String sql =
"SELECT x, y FROM (SELECT 1 as x, 0 as y UNION ALL SELECT 0, 0 "
+ "UNION ALL SELECT 1, 0 UNION ALL SELECT 1, 1) ORDER BY x LIMIT 1";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(0L, 0L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectFromTableLimitOffset() {
String sql =
"SELECT COUNT(a) FROM (\n"
+ "SELECT a FROM (SELECT 1 a UNION ALL SELECT 2 UNION ALL SELECT 3) LIMIT 3 OFFSET 1);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
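// KeyValue has only two rows, so ORDER BY Key DESC LIMIT 2 returns both of them and
// containsInAnyOrder is sufficient for the assertion.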
@Test
public void testZetaSQLSelectFromTableOrderByLimit() {
String sql = "SELECT Key, Value FROM KeyValue ORDER BY Key DESC LIMIT 2;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, "KeyValue234").build(),
Row.withSchema(schema).addValues(15L, "KeyValue235").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectFromTableOrderBy() {
String sql = "SELECT Key, Value FROM KeyValue ORDER BY Key DESC;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(RuntimeException.class);
thrown.expectMessage("ORDER BY without a LIMIT is not supported.");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testZetaSQLSelectFromTableWithStructType2() {
String sql =
"SELECT table_with_struct.struct_col.struct_col_str FROM table_with_struct WHERE id = 1;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue("row_one").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLStructFieldAccessInFilter() {
String sql =
"SELECT table_with_struct.id FROM table_with_struct WHERE"
+ " table_with_struct.struct_col.struct_col_str = 'row_one';";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue(1L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLStructFieldAccessInCast() {
String sql =
"SELECT CAST(table_with_struct.id AS STRING) FROM table_with_struct WHERE"
+ " table_with_struct.struct_col.struct_col_str = 'row_one';";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue("1").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("[BEAM-9191] CAST operator does not work fully due to bugs in unparsing")
public void testZetaSQLStructFieldAccessInCast2() {
String sql =
"SELECT CAST(A.struct_col.struct_col_str AS TIMESTAMP) FROM table_with_struct_ts_string AS"
+ " A";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addDateTimeField("field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValue(parseTimestampWithUTCTimeZone("2019-01-15 13:21:03"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
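// Mixes aggregations over column references (SUM(has_fN)) with one that has none (COUNT(*))
// in a single GROUP BY over a one-row subquery.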
@Test
public void testAggregateWithAndWithoutColumnRefs() {
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
String sql =
"SELECT \n"
+ " id, \n"
+ " SUM(has_f1) as f1_count, \n"
+ " SUM(has_f2) as f2_count, \n"
+ " SUM(has_f3) as f3_count, \n"
+ " SUM(has_f4) as f4_count, \n"
+ " SUM(has_f5) as f5_count, \n"
+ " COUNT(*) as count, \n"
+ " SUM(has_f6) as f6_count \n"
+ "FROM (select 0 as id, 1 as has_f1, 2 as has_f2, 3 as has_f3, 4 as has_f4, 5 as has_f5, 6 as has_f6)\n"
+ "GROUP BY id";
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("id")
.addInt64Field("f1_count")
.addInt64Field("f2_count")
.addInt64Field("f3_count")
.addInt64Field("f4_count")
.addInt64Field("f5_count")
.addInt64Field("count")
.addInt64Field("f6_count")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(0L, 1L, 2L, 3L, 4L, 5L, 1L, 6L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLStructFieldAccessInGroupBy() {
String sql = "SELECT rowCol.row_id, COUNT(*) FROM table_with_struct_two GROUP BY rowCol.row_id";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 1L).build(),
Row.withSchema(schema).addValues(2L, 1L).build(),
Row.withSchema(schema).addValues(3L, 2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
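// ANY_VALUE is non-deterministic, so instead of asserting fixed rows the test checks that each
// key maps to one of the values allowed for that group.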
@Test
public void testZetaSQLAnyValueInGroupBy() {
String sql =
"SELECT rowCol.row_id as key, ANY_VALUE(rowCol.data) as any_value FROM table_with_struct_two GROUP BY rowCol.row_id";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Map<Long, List<String>> allowedTuples = new HashMap<>();
allowedTuples.put(1L, Arrays.asList("data1"));
allowedTuples.put(2L, Arrays.asList("data2"));
allowedTuples.put(3L, Arrays.asList("data2", "data3"));
PAssert.that(stream)
.satisfies(
input -> {
Iterator<Row> iter = input.iterator();
while (iter.hasNext()) {
Row row = iter.next();
List<String> values = allowedTuples.remove(row.getInt64("key"));
              Assert.assertNotNull(values);
assertTrue(values.contains(row.getString("any_value")));
}
assertTrue(allowedTuples.isEmpty());
return null;
});
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLStructFieldAccessInGroupBy2() {
String sql =
"SELECT rowCol.data, MAX(rowCol.row_id), MIN(rowCol.row_id) FROM table_with_struct_two"
+ " GROUP BY rowCol.data";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addStringField("field1")
.addInt64Field("field2")
.addInt64Field("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("data1", 1L, 1L).build(),
Row.withSchema(schema).addValues("data2", 3L, 2L).build(),
Row.withSchema(schema).addValues("data3", 3L, 3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLStructFieldAccessInnerJoin() {
String sql =
"SELECT A.rowCol.data FROM table_with_struct_two AS A INNER JOIN "
+ "table_with_struct AS B "
+ "ON A.rowCol.row_id = B.id";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue("data1").build(),
Row.withSchema(schema).addValue("data2").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectFromTableWithArrayType() {
String sql = "SELECT array_col FROM table_with_array;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addArrayField("field", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue(Arrays.asList("1", "2", "3")).build(),
Row.withSchema(schema).addValue(ImmutableList.of()).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLSelectStarFromTable() {
String sql = "SELECT * FROM BigTable;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addDateTimeField("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
15L,
"BigTable235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()))
.build(),
Row.withSchema(schema)
.addValues(
16L,
"BigTable236",
new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC()))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicFiltering() {
String sql = "SELECT Key, Value FROM KeyValue WHERE Key = 14;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder().addInt64Field("field1").addStringField("field2").build())
.addValues(14L, "KeyValue234")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicFilteringTwo() {
String sql = "SELECT Key, Value FROM KeyValue WHERE Key = 14 AND Value = 'non-existing';";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream).containsInAnyOrder();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicFilteringThree() {
String sql = "SELECT Key, Value FROM KeyValue WHERE Key = 14 OR Key = 15;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, "KeyValue234").build(),
Row.withSchema(schema).addValues(15L, "KeyValue235").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLCountOnAColumn() {
String sql = "SELECT COUNT(Key) FROM KeyValue";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLAggDistinct() {
String sql = "SELECT Key, COUNT(DISTINCT Value) FROM KeyValue GROUP BY Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(RuntimeException.class);
thrown.expectMessage("Does not support COUNT DISTINCT");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testZetaSQLBasicAgg() {
String sql = "SELECT Key, COUNT(*) FROM KeyValue GROUP BY Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, 1L).build(),
Row.withSchema(schema).addValues(15L, 1L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLColumnAlias1() {
String sql = "SELECT Key, COUNT(*) AS count_col FROM KeyValue GROUP BY Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
Schema outputSchema = stream.getSchema();
Assert.assertEquals(2, outputSchema.getFieldCount());
Assert.assertEquals("Key", outputSchema.getField(0).getName());
Assert.assertEquals("count_col", outputSchema.getField(1).getName());
}
@Test
public void testZetaSQLColumnAlias2() {
String sql =
"SELECT Key AS k1, (count_col + 1) AS k2 FROM (SELECT Key, COUNT(*) AS count_col FROM"
+ " KeyValue GROUP BY Key)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
Schema outputSchema = stream.getSchema();
Assert.assertEquals(2, outputSchema.getFieldCount());
Assert.assertEquals("k1", outputSchema.getField(0).getName());
Assert.assertEquals("k2", outputSchema.getField(1).getName());
}
@Test
public void testZetaSQLColumnAlias3() {
String sql = "SELECT Key AS v1, Value AS v2, ts AS v3 FROM KeyValue";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
Schema outputSchema = stream.getSchema();
Assert.assertEquals(3, outputSchema.getFieldCount());
Assert.assertEquals("v1", outputSchema.getField(0).getName());
Assert.assertEquals("v2", outputSchema.getField(1).getName());
Assert.assertEquals("v3", outputSchema.getField(2).getName());
}
@Test
public void testZetaSQLColumnAlias4() {
String sql = "SELECT CAST(123 AS INT64) AS cast_col";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
Schema outputSchema = stream.getSchema();
Assert.assertEquals(1, outputSchema.getFieldCount());
Assert.assertEquals("cast_col", outputSchema.getField(0).getName());
}
@Test
public void testZetaSQLAmbiguousAlias() {
String sql = "SELECT row_id as ID, int64_col as ID FROM table_all_types GROUP BY ID;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expectMessage(
"Name ID in GROUP BY clause is ambiguous; it may refer to multiple columns in the"
+ " SELECT-list [at 1:68]");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testZetaSQLAggWithOrdinalReference() {
String sql = "SELECT Key, COUNT(*) FROM aggregate_test_table GROUP BY 1";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 2L).build(),
Row.withSchema(schema).addValues(2L, 3L).build(),
Row.withSchema(schema).addValues(3L, 2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLAggWithAliasReference() {
String sql = "SELECT Key AS K, COUNT(*) FROM aggregate_test_table GROUP BY K";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 2L).build(),
Row.withSchema(schema).addValues(2L, 3L).build(),
Row.withSchema(schema).addValues(3L, 2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicAgg2() {
String sql = "SELECT Key, COUNT(*) FROM aggregate_test_table GROUP BY Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 2L).build(),
Row.withSchema(schema).addValues(2L, 3L).build(),
Row.withSchema(schema).addValues(3L, 2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicAgg3() {
String sql = "SELECT Key, Key2, COUNT(*) FROM aggregate_test_table GROUP BY Key2, Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addInt64Field("field3")
.addInt64Field("field2")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 10L, 1L).build(),
Row.withSchema(schema).addValues(1L, 11L, 1L).build(),
Row.withSchema(schema).addValues(2L, 11L, 2L).build(),
Row.withSchema(schema).addValues(2L, 12L, 1L).build(),
Row.withSchema(schema).addValues(3L, 13L, 2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicAgg4() {
String sql =
"SELECT Key, Key2, MAX(f_int_1), MIN(f_int_1), SUM(f_int_1), SUM(f_double_1) "
+ "FROM aggregate_test_table GROUP BY Key2, Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addInt64Field("field3")
.addInt64Field("field2")
.addInt64Field("field4")
.addInt64Field("field5")
.addDoubleField("field6")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 10L, 1L, 1L, 1L, 1.0).build(),
Row.withSchema(schema).addValues(1L, 11L, 2L, 2L, 2L, 2.0).build(),
Row.withSchema(schema).addValues(2L, 11L, 4L, 3L, 7L, 7.0).build(),
Row.withSchema(schema).addValues(2L, 12L, 5L, 5L, 5L, 5.0).build(),
Row.withSchema(schema).addValues(3L, 13L, 7L, 6L, 13L, 13.0).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicAgg5() {
String sql =
"SELECT Key, Key2, AVG(CAST(f_int_1 AS FLOAT64)), AVG(f_double_1) "
+ "FROM aggregate_test_table GROUP BY Key2, Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addInt64Field("field2")
.addDoubleField("field3")
.addDoubleField("field4")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 10L, 1.0, 1.0).build(),
Row.withSchema(schema).addValues(1L, 11L, 2.0, 2.0).build(),
Row.withSchema(schema).addValues(2L, 11L, 3.5, 3.5).build(),
Row.withSchema(schema).addValues(2L, 12L, 5.0, 5.0).build(),
Row.withSchema(schema).addValues(3L, 13L, 6.5, 6.5).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore(
"Calcite infers return type of AVG(int64) as BIGINT while ZetaSQL requires it as either"
+ " NUMERIC or DOUBLE/FLOAT64")
public void testZetaSQLTestAVG() {
    String sql = "SELECT Key, AVG(f_int_1) " + "FROM aggregate_test_table GROUP BY Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addInt64Field("field2")
.addInt64Field("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 10L, 1L).build(),
Row.withSchema(schema).addValues(1L, 11L, 6L).build(),
Row.withSchema(schema).addValues(2L, 11L, 6L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
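// Groups on the expression int64_col + 1; the expected values suggest table_all_types holds
// int64_col values -1 through -5.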
@Test
public void testZetaSQLGroupByExprInSelect() {
String sql = "SELECT int64_col + 1 FROM table_all_types GROUP BY int64_col + 1;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue(0L).build(),
Row.withSchema(schema).addValue(-1L).build(),
Row.withSchema(schema).addValue(-2L).build(),
Row.withSchema(schema).addValue(-3L).build(),
Row.withSchema(schema).addValue(-4L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLGroupByAndFiltering() {
String sql = "SELECT int64_col FROM table_all_types WHERE int64_col = 1 GROUP BY int64_col;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream).containsInAnyOrder();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLGroupByAndFilteringOnNonGroupByColumn() {
String sql = "SELECT int64_col FROM table_all_types WHERE double_col = 0.5 GROUP BY int64_col;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue(-5L).build(),
Row.withSchema(schema).addValue(-4L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBasicHaving() {
String sql = "SELECT Key, COUNT(*) FROM aggregate_test_table GROUP BY Key HAVING COUNT(*) > 2";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(2L, 3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
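// HAVING false filters out every group, so the aggregation produces an empty result.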
@Test
public void testZetaSQLHavingNull() {
String sql = "SELECT SUM(int64_val) FROM all_null_table GROUP BY primary_key HAVING false";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field").build();
PAssert.that(stream).empty();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
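// The two KeyValue rows have timestamps one second apart, so each lands in its own
// one-second tumbling window with a count of 1.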
@Test
public void testZetaSQLBasicFixedWindowing() {
String sql =
"SELECT "
+ "COUNT(*) as field_count, "
+ "TUMBLE_START(\"INTERVAL 1 SECOND\") as window_start, "
+ "TUMBLE_END(\"INTERVAL 1 SECOND\") as window_end "
+ "FROM KeyValue "
+ "GROUP BY TUMBLE(ts, \"INTERVAL 1 SECOND\");";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("count_start")
.addDateTimeField("field1")
.addDateTimeField("field2")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
1L,
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()),
new DateTime(2018, 7, 1, 21, 26, 8, ISOChronology.getInstanceUTC()))
.build(),
Row.withSchema(schema)
.addValues(
1L,
new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()),
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLNestedQueryOne() {
String sql =
"SELECT a.Value, a.Key FROM (SELECT Key, Value FROM KeyValue WHERE Key = 14 OR Key = 15)"
+ " as a;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field2").addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("KeyValue234", 14L).build(),
Row.withSchema(schema).addValues("KeyValue235", 15L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLNestedQueryTwo() {
String sql =
"SELECT a.Key, a.Key2, COUNT(*) FROM "
+ " (SELECT * FROM aggregate_test_table WHERE Key != 10) as a "
+ " GROUP BY a.Key2, a.Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addInt64Field("field3")
.addInt64Field("field2")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L, 10L, 1L).build(),
Row.withSchema(schema).addValues(1L, 11L, 1L).build(),
Row.withSchema(schema).addValues(2L, 11L, 2L).build(),
Row.withSchema(schema).addValues(2L, 12L, 1L).build(),
Row.withSchema(schema).addValues(3L, 13L, 2L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLNestedQueryThree() {
String sql =
"SELECT * FROM (SELECT * FROM KeyValue) AS t1 INNER JOIN (SELECT * FROM BigTable) AS t2 on"
+ " t1.Key = t2.RowKey";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addInt64Field("Key")
.addStringField("Value")
.addDateTimeField("ts")
.addInt64Field("RowKey")
.addStringField("Value2")
.addDateTimeField("ts2")
.build())
.addValues(
15L,
"KeyValue235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()),
15L,
"BigTable235",
new DateTime(2018, 7, 1, 21, 26, 7, ISOChronology.getInstanceUTC()))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLNestedQueryFive() {
String sql =
"SELECT a.Value, a.Key FROM (SELECT Value, Key FROM KeyValue WHERE Key = 14 OR Key = 15)"
+ " as a;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field2").addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("KeyValue234", 14L).build(),
Row.withSchema(schema).addValues("KeyValue235", 15L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateLiteral() {
String sql = "SELECT DATE '2020-3-30'";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_date", SqlTypes.DATE).build())
.addValues(LocalDate.of(2020, 3, 30))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateColumn() {
String sql = "SELECT FORMAT_DATE('%b-%d-%Y', date_field) FROM table_with_date";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("f_date_str").build())
.addValues("Dec-25-2008")
.build(),
Row.withSchema(Schema.builder().addStringField("f_date_str").build())
.addValues("Apr-07-2020")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
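// Extracts ISOYEAR/YEAR/ISOWEEK/MONTH across a calendar-year boundary: 2016-01-01 still falls
// in ISO year 2015 and ISO week 53. The inline /* ... */ values are alternative expectations
// that are not asserted.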
@Test
public void testExtractDate() {
String sql =
"WITH Dates AS (\n"
+ " SELECT DATE '2015-12-31' AS date UNION ALL\n"
+ " SELECT DATE '2016-01-01'\n"
+ ")\n"
+ "SELECT\n"
+ " EXTRACT(ISOYEAR FROM date) AS isoyear,\n"
+ " EXTRACT(YEAR FROM date) AS year,\n"
+ " EXTRACT(ISOWEEK FROM date) AS isoweek,\n"
+ " EXTRACT(MONTH FROM date) AS month\n"
+ "FROM Dates\n";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addField("isoyear", FieldType.INT64)
.addField("year", FieldType.INT64)
.addField("isoweek", FieldType.INT64)
.addField("month", FieldType.INT64)
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(2015L, 2015L, 53L /* , 52L */, 12L).build(),
Row.withSchema(schema).addValues(2015L, 2016L, 53L /* , 0L */, 1L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateFromYearMonthDay() {
String sql = "SELECT DATE(2008, 12, 25)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_date", SqlTypes.DATE).build())
.addValues(LocalDate.of(2008, 12, 25))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
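// DATE(timestamp, time_zone) yields the civil date in that zone: 2016-12-25 05:30+07 is still
// 2016-12-24 in America/Los_Angeles.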
@Test
public void testDateFromTimestamp() {
String sql = "SELECT DATE(TIMESTAMP '2016-12-25 05:30:00+07', 'America/Los_Angeles')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_date", SqlTypes.DATE).build())
.addValues(LocalDate.of(2016, 12, 24))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateAdd() {
String sql =
"SELECT "
+ "DATE_ADD(DATE '2008-12-25', INTERVAL 5 DAY), "
+ "DATE_ADD(DATE '2008-12-25', INTERVAL 1 MONTH), "
+ "DATE_ADD(DATE '2008-12-25', INTERVAL 1 YEAR), ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addLogicalTypeField("f_date1", SqlTypes.DATE)
.addLogicalTypeField("f_date2", SqlTypes.DATE)
.addLogicalTypeField("f_date3", SqlTypes.DATE)
.build())
.addValues(
LocalDate.of(2008, 12, 30),
LocalDate.of(2009, 1, 25),
LocalDate.of(2009, 12, 25))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateSub() {
String sql =
"SELECT "
+ "DATE_SUB(DATE '2008-12-25', INTERVAL 5 DAY), "
+ "DATE_SUB(DATE '2008-12-25', INTERVAL 1 MONTH), "
+ "DATE_SUB(DATE '2008-12-25', INTERVAL 1 YEAR), ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addLogicalTypeField("f_date1", SqlTypes.DATE)
.addLogicalTypeField("f_date2", SqlTypes.DATE)
.addLogicalTypeField("f_date3", SqlTypes.DATE)
.build())
.addValues(
LocalDate.of(2008, 12, 20),
LocalDate.of(2008, 11, 25),
LocalDate.of(2007, 12, 25))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateDiff() {
String sql = "SELECT DATE_DIFF(DATE '2010-07-07', DATE '2008-12-25', DAY)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("f_date_diff").build())
.addValues(559L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
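// DATE_DIFF is signed (first argument minus second); one ISO-week boundary lies between
// 2017-12-17 (a Sunday) and 2017-12-18 (a Monday), hence -1.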
@Test
public void testDateDiffNegativeResult() {
String sql = "SELECT DATE_DIFF(DATE '2017-12-17', DATE '2017-12-18', ISOWEEK)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("f_date_diff").build())
.addValues(-1L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateTrunc() {
String sql = "SELECT DATE_TRUNC(DATE '2015-06-15', ISOYEAR)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder().addLogicalTypeField("f_date_trunc", SqlTypes.DATE).build())
.addValues(LocalDate.of(2014, 12, 29))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testFormatDate() {
String sql = "SELECT FORMAT_DATE('%b-%d-%Y', DATE '2008-12-25')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("f_date_str").build())
.addValues("Dec-25-2008")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testParseDate() {
String sql = "SELECT PARSE_DATE('%m %d %y', '10 14 18')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_date", SqlTypes.DATE).build())
.addValues(LocalDate.of(2018, 10, 14))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
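// UNIX_DATE returns the number of days since 1970-01-01; 2008-12-25 is day 14238.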
@Test
public void testDateToUnixInt64() {
String sql = "SELECT UNIX_DATE(DATE '2008-12-25')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("f_unix_date").build())
.addValues(14238L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDateFromUnixInt64() {
String sql = "SELECT DATE_FROM_UNIX_DATE(14238)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_date", SqlTypes.DATE).build())
.addValues(LocalDate.of(2008, 12, 25))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeLiteral() {
String sql = "SELECT TIME '15:30:00', TIME '15:30:00.135246' ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addLogicalTypeField("f_time1", SqlTypes.TIME)
.addLogicalTypeField("f_time2", SqlTypes.TIME)
.build())
.addValues(LocalTime.of(15, 30, 0))
.addValues(LocalTime.of(15, 30, 0, 135246000))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeColumn() {
String sql = "SELECT FORMAT_TIME('%T', time_field) FROM table_with_time";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("f_time_str").build())
.addValues("15:30:00")
.build(),
Row.withSchema(Schema.builder().addStringField("f_time_str").build())
.addValues("23:35:59")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testExtractTime() {
String sql =
"SELECT "
+ "EXTRACT(HOUR FROM TIME '15:30:35.123456') as hour, "
+ "EXTRACT(MINUTE FROM TIME '15:30:35.123456') as minute, "
+ "EXTRACT(SECOND FROM TIME '15:30:35.123456') as second, "
+ "EXTRACT(MILLISECOND FROM TIME '15:30:35.123456') as millisecond, "
+ "EXTRACT(MICROSECOND FROM TIME '15:30:35.123456') as microsecond ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addField("hour", FieldType.INT64)
.addField("minute", FieldType.INT64)
.addField("second", FieldType.INT64)
.addField("millisecond", FieldType.INT64)
.addField("microsecond", FieldType.INT64)
.build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues(15L, 30L, 35L, 123L, 123456L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeFromHourMinuteSecond() {
String sql = "SELECT TIME(15, 30, 0)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_time", SqlTypes.TIME).build())
.addValues(LocalTime.of(15, 30, 0))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
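// TIME(timestamp, time_zone) yields the time of day in that zone: 2008-12-25 15:30+08 is
// 23:30 on the previous day in America/Los_Angeles.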
@Test
public void testTimeFromTimestamp() {
String sql = "SELECT TIME(TIMESTAMP '2008-12-25 15:30:00+08', 'America/Los_Angeles')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_time", SqlTypes.TIME).build())
.addValues(LocalTime.of(23, 30, 0))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeAdd() {
String sql =
"SELECT "
+ "TIME_ADD(TIME '15:30:00', INTERVAL 10 MICROSECOND), "
+ "TIME_ADD(TIME '15:30:00', INTERVAL 10 MILLISECOND), "
+ "TIME_ADD(TIME '15:30:00', INTERVAL 10 SECOND), "
+ "TIME_ADD(TIME '15:30:00', INTERVAL 10 MINUTE), "
+ "TIME_ADD(TIME '15:30:00', INTERVAL 10 HOUR) ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addLogicalTypeField("f_time1", SqlTypes.TIME)
.addLogicalTypeField("f_time2", SqlTypes.TIME)
.addLogicalTypeField("f_time3", SqlTypes.TIME)
.addLogicalTypeField("f_time4", SqlTypes.TIME)
.addLogicalTypeField("f_time5", SqlTypes.TIME)
.build())
.addValues(
LocalTime.of(15, 30, 0, 10000),
LocalTime.of(15, 30, 0, 10000000),
LocalTime.of(15, 30, 10, 0),
LocalTime.of(15, 40, 0, 0),
LocalTime.of(1, 30, 0, 0))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeSub() {
String sql =
"SELECT "
+ "TIME_SUB(TIME '15:30:00', INTERVAL 10 MICROSECOND), "
+ "TIME_SUB(TIME '15:30:00', INTERVAL 10 MILLISECOND), "
+ "TIME_SUB(TIME '15:30:00', INTERVAL 10 SECOND), "
+ "TIME_SUB(TIME '15:30:00', INTERVAL 10 MINUTE), "
+ "TIME_SUB(TIME '15:30:00', INTERVAL 10 HOUR) ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
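    // Subtracting 10 microseconds yields 15:29:59.999990 (999_990_000ns) and 10 milliseconds
    // yields 15:29:59.990; subtracting 10 hours from 15:30 gives 05:30.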
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addLogicalTypeField("f_time1", SqlTypes.TIME)
.addLogicalTypeField("f_time2", SqlTypes.TIME)
.addLogicalTypeField("f_time3", SqlTypes.TIME)
.addLogicalTypeField("f_time4", SqlTypes.TIME)
.addLogicalTypeField("f_time5", SqlTypes.TIME)
.build())
.addValues(
LocalTime.of(15, 29, 59, 999990000),
LocalTime.of(15, 29, 59, 990000000),
LocalTime.of(15, 29, 50, 0),
LocalTime.of(15, 20, 0, 0),
LocalTime.of(5, 30, 0, 0))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeDiff() {
String sql = "SELECT TIME_DIFF(TIME '15:30:00', TIME '14:35:00', MINUTE)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("f_time_diff").build())
.addValues(55L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeDiffNegativeResult() {
String sql = "SELECT TIME_DIFF(TIME '14:35:00', TIME '15:30:00', MINUTE)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("f_time_diff").build())
.addValues(-55L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimeTrunc() {
String sql = "SELECT TIME_TRUNC(TIME '15:30:35', HOUR)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder().addLogicalTypeField("f_time_trunc", SqlTypes.TIME).build())
.addValues(LocalTime.of(15, 0, 0))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testFormatTime() {
String sql = "SELECT FORMAT_TIME('%R', TIME '15:30:00')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
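    // The %R format specifier is equivalent to %H:%M, so only hours and minutes are emitted.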
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("f_time_str").build())
.addValues("15:30")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testParseTime() {
String sql = "SELECT PARSE_TIME('%H', '15')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addLogicalTypeField("f_time", SqlTypes.TIME).build())
.addValues(LocalTime.of(15, 0, 0))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("https:
public void testCastBetweenTimeAndString() {
String sql =
"SELECT CAST(s1 as TIME) as t2, CAST(t1 as STRING) as s2 FROM "
+ "(SELECT '12:34:56.123456' as s1, TIME '12:34:56.123456' as t1)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addLogicalTypeField("t2", SqlTypes.TIME)
.addStringField("s2")
.build())
.addValues(LocalTime.of(12, 34, 56, 123456000), "12:34:56.123456")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampMicrosecondUnsupported() {
String sql =
"WITH Timestamps AS (\n"
+ " SELECT TIMESTAMP '2000-01-01 00:11:22.345678+00' as timestamp\n"
+ ")\n"
+ "SELECT\n"
+ " timestamp,\n"
+ " EXTRACT(ISOYEAR FROM timestamp) AS isoyear,\n"
+ " EXTRACT(YEAR FROM timestamp) AS year,\n"
+ " EXTRACT(ISOWEEK FROM timestamp) AS week,\n"
+ " EXTRACT(MINUTE FROM timestamp) AS minute\n"
+ "FROM Timestamps\n";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testTimestampLiteralWithoutTimeZone() {
String sql = "SELECT TIMESTAMP '2016-12-25 05:30:00'";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addDateTimeField("field1").build())
.addValues(parseTimestampWithUTCTimeZone("2016-12-25 05:30:00"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampLiteralWithUTCTimeZone() {
String sql = "SELECT TIMESTAMP '2016-12-25 05:30:00+00'";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addDateTimeField("field1").build())
.addValues(parseTimestampWithUTCTimeZone("2016-12-25 05:30:00"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectNullIntersectDistinct() {
String sql = "SELECT NULL INTERSECT DISTINCT SELECT 2";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
System.err.println("SCHEMA " + stream.getSchema());
PAssert.that(stream).empty();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectNullIntersectAll() {
String sql = "SELECT NULL INTERSECT ALL SELECT 2";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
System.err.println("SCHEMA " + stream.getSchema());
PAssert.that(stream).empty();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectNullExceptDistinct() {
String sql = "SELECT NULL EXCEPT DISTINCT SELECT 2";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream).containsInAnyOrder(Row.nullRow(stream.getSchema()));
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectNullExceptAll() {
String sql = "SELECT NULL EXCEPT ALL SELECT 2";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream).containsInAnyOrder(Row.nullRow(stream.getSchema()));
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
  @Test

public void testAlreadyDefinedUDFThrowsException() {
String sql = "CREATE FUNCTION foo() AS (0); CREATE FUNCTION foo() AS (1); SELECT foo();";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(ParseException.class);
thrown.expectMessage("Failed to define function foo");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testCreateFunctionNoSelectThrowsException() {
String sql = "CREATE FUNCTION plusOne(x INT64) AS (x + 1);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
thrown.expectMessage("Statement list must end in a SELECT statement, not CreateFunctionStmt");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testNullaryUdf() {
String sql = "CREATE FUNCTION zero() AS (0); SELECT zero();";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("x").build()).addValue(0L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testQualifiedNameUdfUnqualifiedCall() {
String sql = "CREATE FUNCTION foo.bar.baz() AS (\"uwu\"); SELECT baz();";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("x").build()).addValue("uwu").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore(
"Qualified paths can't be resolved due to a bug in ZetaSQL: "
+ "https:
public void testQualifiedNameUdfQualifiedCallThrowsException() {
String sql = "CREATE FUNCTION foo.bar.baz() AS (\"uwu\"); SELECT foo.bar.baz();";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("x").build()).addValue("uwu").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testUnaryUdf() {
String sql = "CREATE FUNCTION triple(x INT64) AS (3 * x); SELECT triple(triple(1));";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("x").build()).addValue(9L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testUdfWithinUdf() {
String sql =
"CREATE FUNCTION triple(x INT64) AS (3 * x);"
+ " CREATE FUNCTION nonuple(x INT64) as (triple(triple(x)));"
+ " SELECT nonuple(1);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("x").build()).addValue(9L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testUndefinedUdfThrowsException() {
String sql =
"CREATE FUNCTION foo() AS (bar()); "
+ "CREATE FUNCTION bar() AS (foo()); "
+ "SELECT foo();";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(SqlException.class);
thrown.expectMessage("Function not found: bar");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testRecursiveUdfThrowsException() {
String sql = "CREATE FUNCTION omega() AS (omega()); SELECT omega();";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(SqlException.class);
thrown.expectMessage("Function not found: omega");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testTimestampLiteralWithNonUTCTimeZone() {
String sql = "SELECT TIMESTAMP '2018-12-10 10:38:59-10:00'";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addDateTimeField("f_timestamp_with_time_zone").build())
.addValues(parseTimestampWithTimeZone("2018-12-10 10:38:59-1000"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testExtractTimestamp() {
String sql =
"WITH Timestamps AS (\n"
+ " SELECT TIMESTAMP '2007-12-31 12:34:56' AS timestamp UNION ALL\n"
+ " SELECT TIMESTAMP '2009-12-31'\n"
+ ")\n"
+ "SELECT\n"
+ " EXTRACT(ISOYEAR FROM timestamp) AS isoyear,\n"
+ " EXTRACT(YEAR FROM timestamp) AS year,\n"
+ " EXTRACT(ISOWEEK FROM timestamp) AS isoweek,\n"
+ " EXTRACT(MINUTE FROM timestamp) AS minute\n"
+ "FROM Timestamps\n";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addField("isoyear", FieldType.INT64)
.addField("year", FieldType.INT64)
.addField("isoweek", FieldType.INT64)
.addField("minute", FieldType.INT64)
.build();
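    // 2007-12-31 is a Monday and falls in ISO week 1 of ISO year 2008, while 2009-12-31 falls in
    // ISO week 53 of ISO year 2009; the second timestamp has no time part, so MINUTE is 0.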
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(2008L, 2007L, 1L /* , 53L */, 34L).build(),
Row.withSchema(schema).addValues(2009L, 2009L, 53L /* , 52L */, 0L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testExtractTimestampAtTimeZoneUnsupported() {
String sql =
"WITH Timestamps AS (\n"
+ " SELECT TIMESTAMP '2017-05-26' AS timestamp\n"
+ ")\n"
+ "SELECT\n"
+ " timestamp,\n"
+ " EXTRACT(HOUR FROM timestamp AT TIME ZONE 'America/Vancouver') AS hour,\n"
+ " EXTRACT(DAY FROM timestamp AT TIME ZONE 'America/Vancouver') AS day\n"
+ "FROM Timestamps\n";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testExtractDateFromTimestampUnsupported() {
String sql =
"WITH Timestamps AS (\n"
+ " SELECT TIMESTAMP '2017-05-26' AS ts\n"
+ ")\n"
+ "SELECT\n"
+ " ts,\n"
+ " EXTRACT(DATE FROM ts) AS dt\n"
+ "FROM Timestamps\n";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(SqlException.class);
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testStringFromTimestamp() {
String sql = "SELECT STRING(TIMESTAMP '2008-12-25 15:30:00', 'America/Los_Angeles')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
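    // The literal has no zone and defaults to UTC; 15:30 UTC rendered in America/Los_Angeles
    // (UTC-08 in December) is 07:30:00-08.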
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("f_timestamp_string").build())
.addValues("2008-12-25 07:30:00-08")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampFromString() {
String sql = "SELECT TIMESTAMP('2008-12-25 15:30:00', 'America/Los_Angeles')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addDateTimeField("f_timestamp").build())
.addValues(parseTimestampWithTimeZone("2008-12-25 15:30:00-08"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampAdd() {
String sql =
"SELECT "
+ "TIMESTAMP_ADD(TIMESTAMP '2008-12-25 15:30:00 UTC', INTERVAL 5+5 MINUTE), "
+ "TIMESTAMP_ADD(TIMESTAMP '2008-12-25 15:30:00+07:30', INTERVAL 10 MINUTE)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
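    // INTERVAL 5+5 MINUTE evaluates the expression first, giving a 10-minute interval; the second
    // result keeps the +07:30 offset of its input timestamp.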
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addDateTimeField("f_timestamp_add")
.addDateTimeField("f_timestamp_with_time_zone_add")
.build())
.addValues(
DateTimeUtils.parseTimestampWithUTCTimeZone("2008-12-25 15:40:00"),
parseTimestampWithTimeZone("2008-12-25 15:40:00+0730"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampSub() {
String sql =
"SELECT "
+ "TIMESTAMP_SUB(TIMESTAMP '2008-12-25 15:30:00 UTC', INTERVAL 5+5 MINUTE), "
+ "TIMESTAMP_SUB(TIMESTAMP '2008-12-25 15:30:00+07:30', INTERVAL 10 MINUTE)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addDateTimeField("f_timestamp_sub")
.addDateTimeField("f_timestamp_with_time_zone_sub")
.build())
.addValues(
DateTimeUtils.parseTimestampWithUTCTimeZone("2008-12-25 15:20:00"),
parseTimestampWithTimeZone("2008-12-25 15:20:00+0730"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampDiff() {
String sql =
"SELECT TIMESTAMP_DIFF("
+ "TIMESTAMP '2018-10-14 15:30:00.000 UTC', "
+ "TIMESTAMP '2018-08-14 15:05:00.001 UTC', "
+ "MILLISECOND)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
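    // 2018-08-14 15:05:00.001 UTC to 2018-10-14 15:30:00.000 UTC is 61 days, 24 minutes and
    // 59.999 seconds, i.e. (61 * 24 * 60 + 25) * 60 * 1000 - 1 milliseconds.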
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("f_timestamp_diff").build())
.addValues((61L * 24 * 60 + 25) * 60 * 1000 - 1)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampDiffNegativeResult() {
String sql = "SELECT TIMESTAMP_DIFF(TIMESTAMP '2018-08-14', TIMESTAMP '2018-10-14', DAY)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("f_timestamp_diff").build())
.addValues(-61L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampTrunc() {
String sql = "SELECT TIMESTAMP_TRUNC(TIMESTAMP '2017-11-06 00:00:00+12', ISOWEEK, 'UTC')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
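    // 2017-11-06 00:00:00+12 is 2017-11-05 12:00:00 UTC (a Sunday); truncating to ISOWEEK in UTC
    // returns the preceding Monday, 2017-10-30 00:00:00 UTC.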
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addDateTimeField("f_timestamp_trunc").build())
.addValues(DateTimeUtils.parseTimestampWithUTCTimeZone("2017-10-30 00:00:00"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testFormatTimestamp() {
String sql = "SELECT FORMAT_TIMESTAMP('%D %T', TIMESTAMP '2018-10-14 15:30:00.123+00', 'UTC')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
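    // %D is shorthand for %m/%d/%y and %T for %H:%M:%S, so the timestamp is rendered in UTC as
    // "10/14/18 15:30:00".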
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("f_timestamp_str").build())
.addValues("10/14/18 15:30:00")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testParseTimestamp() {
String sql = "SELECT PARSE_TIMESTAMP('%m-%d-%y %T', '10-14-18 15:30:00', 'UTC')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addDateTimeField("f_timestamp").build())
.addValues(DateTimeUtils.parseTimestampWithUTCTimeZone("2018-10-14 15:30:00"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampFromInt64() {
String sql = "SELECT TIMESTAMP_SECONDS(1230219000), TIMESTAMP_MILLIS(1230219000123) ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
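    // 1230219000 seconds (and 1230219000123 milliseconds) after the Unix epoch correspond to
    // 2008-12-25 15:30:00 UTC and 2008-12-25 15:30:00.123 UTC respectively.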
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addDateTimeField("f_timestamp_seconds")
.addDateTimeField("f_timestamp_millis")
.build())
.addValues(
DateTimeUtils.parseTimestampWithUTCTimeZone("2008-12-25 15:30:00"),
DateTimeUtils.parseTimestampWithUTCTimeZone("2008-12-25 15:30:00.123"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampToUnixInt64() {
String sql =
"SELECT "
+ "UNIX_SECONDS(TIMESTAMP '2008-12-25 15:30:00 UTC'), "
+ "UNIX_MILLIS(TIMESTAMP '2008-12-25 15:30:00.123 UTC')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addInt64Field("f_unix_seconds")
.addInt64Field("f_unix_millis")
.build())
.addValues(1230219000L, 1230219000123L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampFromUnixInt64() {
String sql =
"SELECT "
+ "TIMESTAMP_FROM_UNIX_SECONDS(1230219000), "
+ "TIMESTAMP_FROM_UNIX_MILLIS(1230219000123) ";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(
Schema.builder()
.addDateTimeField("f_timestamp_seconds")
.addDateTimeField("f_timestamp_millis")
.build())
.addValues(
DateTimeUtils.parseTimestampWithUTCTimeZone("2008-12-25 15:30:00"),
DateTimeUtils.parseTimestampWithUTCTimeZone("2008-12-25 15:30:00.123"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDistinct() {
String sql = "SELECT DISTINCT Key2 FROM aggregate_test_table";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addInt64Field("Key2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(10L).build(),
Row.withSchema(schema).addValues(11L).build(),
Row.withSchema(schema).addValues(12L).build(),
Row.withSchema(schema).addValues(13L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testDistinctOnNull() {
String sql = "SELECT DISTINCT str_val FROM all_null_table";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addNullableField("str_val", FieldType.DOUBLE).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testAnyValue() {
String sql = "SELECT ANY_VALUE(double_val) FROM all_null_table";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addNullableField("double_val", FieldType.DOUBLE).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectNULL() {
String sql = "SELECT NULL";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addNullableField("long_val", FieldType.INT64).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testWithQueryOne() {
String sql =
"With T1 AS (SELECT * FROM KeyValue), T2 AS (SELECT * FROM BigTable) SELECT T2.RowKey FROM"
+ " T1 INNER JOIN T2 on T1.Key = T2.RowKey;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addInt64Field("field1").build())
.addValues(15L)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testWithQueryTwo() {
String sql =
"WITH T1 AS (SELECT Key, COUNT(*) as value FROM KeyValue GROUP BY Key) SELECT T1.Key,"
+ " T1.value FROM T1";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, 1L).build(),
Row.withSchema(schema).addValues(15L, 1L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testWithQueryThree() {
String sql =
"WITH T1 as (SELECT Value, Key FROM KeyValue WHERE Key = 14 OR Key = 15) SELECT T1.Value,"
+ " T1.Key FROM T1;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("KeyValue234", 14L).build(),
Row.withSchema(schema).addValues("KeyValue235", 15L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testWithQueryFour() {
String sql =
"WITH T1 as (SELECT Value, Key FROM KeyValue) SELECT T1.Value, T1.Key FROM T1 WHERE T1.Key"
+ " = 14 OR T1.Key = 15;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field2").addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("KeyValue234", 14L).build(),
Row.withSchema(schema).addValues("KeyValue235", 15L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testWithQueryFive() {
String sql =
"WITH T1 AS (SELECT * FROM KeyValue) SELECT T1.Key, COUNT(*) FROM T1 GROUP BY T1.Key";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addInt64Field("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, 1L).build(),
Row.withSchema(schema).addValues(15L, 1L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testWithQuerySix() {
String sql =
"WITH T1 AS (SELECT * FROM window_test_table_two) SELECT "
+ "COUNT(*) as field_count, "
+ "SESSION_START(\"INTERVAL 3 SECOND\") as window_start, "
+ "SESSION_END(\"INTERVAL 3 SECOND\") as window_end "
+ "FROM T1 "
+ "GROUP BY SESSION(ts, \"INTERVAL 3 SECOND\");";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("count_star")
.addDateTimeField("field1")
.addDateTimeField("field2")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
2L,
new DateTime(2018, 7, 1, 21, 26, 12, ISOChronology.getInstanceUTC()),
new DateTime(2018, 7, 1, 21, 26, 12, ISOChronology.getInstanceUTC()))
.build(),
Row.withSchema(schema)
.addValues(
2L,
new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()),
new DateTime(2018, 7, 1, 21, 26, 6, ISOChronology.getInstanceUTC()))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testUNNESTLiteral() {
String sql = "SELECT * FROM UNNEST(ARRAY<STRING>['foo', 'bar']);";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addStringField("str_field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("foo").build(),
Row.withSchema(schema).addValues("bar").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testUNNESTParameters() {
String sql = "SELECT * FROM UNNEST(@p0);";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createArrayValue(
TypeFactory.createArrayType(TypeFactory.createSimpleType(TypeKind.TYPE_STRING)),
ImmutableList.of(Value.createStringValue("foo"), Value.createStringValue("bar"))));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addStringField("str_field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("foo").build(),
Row.withSchema(schema).addValues("bar").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("BEAM-9515")
public void testUNNESTExpression() {
String sql = "SELECT * FROM UNNEST(ARRAY(SELECT Value FROM KeyValue));";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addStringField("str_field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("KeyValue234").build(),
Row.withSchema(schema).addValues("KeyValue235").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testNamedUNNESTLiteral() {
String sql = "SELECT *, T1 FROM UNNEST(ARRAY<STRING>['foo', 'bar']) AS T1";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema =
Schema.builder().addStringField("str_field").addStringField("str2_field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("foo", "foo").build(),
Row.withSchema(schema).addValues("bar", "bar").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testNamedUNNESTLiteralOffset() {
String sql = "SELECT x, p FROM UNNEST([3, 4]) AS x WITH OFFSET p";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
thrown.expect(UnsupportedOperationException.class);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
}
@Test
public void testUnnestArrayColumn() {
String sql =
"SELECT p FROM table_with_array_for_unnest, UNNEST(table_with_array_for_unnest.int_array_col) as p";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema schema = Schema.builder().addInt64Field("int_field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue(14L).build(),
Row.withSchema(schema).addValue(18L).build(),
Row.withSchema(schema).addValue(22L).build(),
Row.withSchema(schema).addValue(24L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testStringAggregation() {
String sql =
"SELECT STRING_AGG(fruit) AS string_agg"
+ " FROM UNNEST([\"apple\", \"pear\", \"banana\", \"pear\"]) AS fruit";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
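    // STRING_AGG with no delimiter argument joins the input values with a comma, preserving
    // duplicates ("pear" appears twice).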
Schema schema = Schema.builder().addStringField("string_field").build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValue("apple,pear,banana,pear").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("Seeing exception in Beam, need further investigation on the cause of this failed query.")
public void testNamedUNNESTJoin() {
String sql =
"SELECT * "
+ "FROM table_with_array_for_unnest AS t1"
+ " LEFT JOIN UNNEST(t1.int_array_col) AS t2"
+ " on "
+ " t1.int_col = t2";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream).containsInAnyOrder();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testUnnestJoinStruct() {
String sql =
"SELECT b, x FROM UNNEST("
+ "[STRUCT(true AS b, [3, 5] AS arr), STRUCT(false AS b, [7, 9] AS arr)]) t "
+ "LEFT JOIN UNNEST(t.arr) x ON b";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testUnnestJoinLiteral() {
String sql =
"SELECT a, b "
+ "FROM UNNEST([1, 1, 2, 3, 5, 8, 13, NULL]) a "
+ "JOIN UNNEST([1, 2, 3, 5, 7, 11, 13, NULL]) b "
+ "ON a = b";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testUnnestJoinSubquery() {
String sql =
"SELECT a, b "
+ "FROM UNNEST([1, 2, 3]) a "
+ "JOIN UNNEST(ARRAY(SELECT b FROM UNNEST([3, 2, 1]) b)) b "
+ "ON a = b";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testCaseNoValue() {
String sql = "SELECT CASE WHEN 1 > 2 THEN 'not possible' ELSE 'seems right' END";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("str_field").build())
.addValue("seems right")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCaseWithValue() {
String sql = "SELECT CASE 1 WHEN 2 THEN 'not possible' ELSE 'seems right' END";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("str_field").build())
.addValue("seems right")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCaseWithValueMultipleCases() {
String sql =
"SELECT CASE 2 WHEN 1 THEN 'not possible' WHEN 2 THEN 'seems right' ELSE 'also not"
+ " possible' END";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("str_field").build())
.addValue("seems right")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCaseWithValueNoElse() {
String sql = "SELECT CASE 2 WHEN 1 THEN 'not possible' WHEN 2 THEN 'seems right' END";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addStringField("str_field").build())
.addValue("seems right")
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCaseNoValueNoElseNoMatch() {
String sql = "SELECT CASE WHEN 'abc' = '123' THEN 'not possible' END";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addNullableField("str_field", FieldType.STRING).build())
.addValue(null)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCaseWithValueNoElseNoMatch() {
String sql = "SELECT CASE 2 WHEN 1 THEN 'not possible' END";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addNullableField("str_field", FieldType.STRING).build())
.addValue(null)
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCastToDateWithCase() {
String sql =
"SELECT f_int, \n"
+ "CASE WHEN CHAR_LENGTH(TRIM(f_string)) = 8 \n"
+ " THEN CAST (CONCAT(\n"
+ " SUBSTR(TRIM(f_string), 1, 4) \n"
+ " , '-' \n"
+ " , SUBSTR(TRIM(f_string), 5, 2) \n"
+ " , '-' \n"
+ " , SUBSTR(TRIM(f_string), 7, 2)) AS DATE)\n"
+ " ELSE NULL\n"
+ "END \n"
+ "FROM table_for_case_when";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema resultType =
Schema.builder()
.addInt64Field("f_long")
.addNullableField("f_date", FieldType.logicalType(SqlTypes.DATE))
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(resultType).addValues(1L, LocalDate.parse("2018-10-18")).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIntersectAll() {
String sql =
"SELECT Key FROM aggregate_test_table "
+ "INTERSECT ALL "
+ "SELECT Key FROM aggregate_test_table_two";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema resultType = Schema.builder().addInt64Field("field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(resultType).addValues(1L).build(),
Row.withSchema(resultType).addValues(2L).build(),
Row.withSchema(resultType).addValues(2L).build(),
Row.withSchema(resultType).addValues(2L).build(),
Row.withSchema(resultType).addValues(3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIntersectDistinct() {
String sql =
"SELECT Key FROM aggregate_test_table "
+ "INTERSECT DISTINCT "
+ "SELECT Key FROM aggregate_test_table_two";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema resultType = Schema.builder().addInt64Field("field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(resultType).addValues(1L).build(),
Row.withSchema(resultType).addValues(2L).build(),
Row.withSchema(resultType).addValues(3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testExceptAll() {
String sql =
"SELECT Key FROM aggregate_test_table "
+ "EXCEPT ALL "
+ "SELECT Key FROM aggregate_test_table_two";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema resultType = Schema.builder().addInt64Field("field").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(resultType).addValues(1L).build(),
Row.withSchema(resultType).addValues(3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectFromEmptyTable() {
String sql = "SELECT * FROM table_empty;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
PAssert.that(stream).containsInAnyOrder();
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testStartsWithString() {
String sql = "SELECT STARTS_WITH('string1', 'stri')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(true).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testStartsWithString2() {
String sql = "SELECT STARTS_WITH(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
.put("p1", Value.createStringValue(""))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testStartsWithString3() {
String sql = "SELECT STARTS_WITH(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
.put("p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEndsWithString() {
String sql = "SELECT STARTS_WITH('string1', 'ng0')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEndsWithString2() {
String sql = "SELECT STARTS_WITH(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
.put("p1", Value.createStringValue(""))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testEndsWithString3() {
String sql = "SELECT STARTS_WITH(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
.put("p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.BOOLEAN).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Boolean) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("Does not support DateTime literal.")
public void testDateTimeLiteral() {
String sql = "SELECT DATETIME '2018-01-01 05:30:00.334'";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(RuntimeException.class);
thrown.expectMessage("Unsupported ResolvedLiteral type: DATETIME");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testConcatWithOneParameters() {
String sql = "SELECT concat('abc')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatWithTwoParameters() {
String sql = "SELECT concat('abc', 'def')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abcdef").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatWithThreeParameters() {
String sql = "SELECT concat('abc', 'def', 'xyz')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abcdefxyz").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatWithFourParameters() {
String sql = "SELECT concat('abc', 'def', ' ', 'xyz')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues("abcdef xyz").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatWithFiveParameters() {
String sql = "SELECT concat('abc', 'def', ' ', 'xyz', 'kkk')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues("abcdef xyzkkk").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatWithSixParameters() {
String sql = "SELECT concat('abc', 'def', ' ', 'xyz', 'kkk', 'ttt')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues("abcdef xyzkkkttt").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatWithNull1() {
String sql = "SELECT CONCAT(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createStringValue(""),
"p1",
Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatWithNull2() {
String sql = "SELECT CONCAT(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0",
Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p1",
Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testNamedParameterQuery() {
String sql = "SELECT @ColA AS ColA";
ImmutableMap<String, Value> params = ImmutableMap.of("ColA", Value.createInt64Value(5));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(5L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testArrayStructLiteral() {
String sql = "SELECT ARRAY<STRUCT<INT64, INT64>>[(11, 12)];";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema innerSchema =
Schema.of(Field.of("s", FieldType.INT64), Field.of("i", FieldType.INT64));
final Schema schema =
Schema.of(Field.of("field1", FieldType.array(FieldType.row(innerSchema))));
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValue(ImmutableList.of(Row.withSchema(innerSchema).addValues(11L, 12L).build()))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testParameterStruct() {
String sql = "SELECT @p as ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p",
Value.createStructValue(
TypeFactory.createStructType(
ImmutableList.of(
new StructType.StructField(
"s", TypeFactory.createSimpleType(TypeKind.TYPE_STRING)),
new StructType.StructField(
"i", TypeFactory.createSimpleType(TypeKind.TYPE_INT64)))),
ImmutableList.of(Value.createStringValue("foo"), Value.createInt64Value(1L))));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema innerSchema =
Schema.of(Field.of("s", FieldType.STRING), Field.of("i", FieldType.INT64));
final Schema schema = Schema.of(Field.of("field1", FieldType.row(innerSchema)));
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValue(Row.withSchema(innerSchema).addValues("foo", 1L).build())
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testParameterStructNested() {
String sql = "SELECT @outer_struct.inner_struct.s as ColA";
StructType innerStructType =
TypeFactory.createStructType(
ImmutableList.of(
new StructType.StructField(
"s", TypeFactory.createSimpleType(TypeKind.TYPE_STRING))));
ImmutableMap<String, Value> params =
ImmutableMap.of(
"outer_struct",
Value.createStructValue(
TypeFactory.createStructType(
ImmutableList.of(new StructType.StructField("inner_struct", innerStructType))),
ImmutableList.of(
Value.createStructValue(
innerStructType, ImmutableList.of(Value.createStringValue("foo"))))));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValue("foo").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatNamedParameterQuery() {
String sql = "SELECT CONCAT(@p0, @p1) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createStringValue(""), "p1", Value.createStringValue("A"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("A").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testConcatPositionalParameterQuery() {
String sql = "SELECT CONCAT(?, ?, ?) AS ColA";
ImmutableList<Value> params =
ImmutableList.of(
Value.createStringValue("a"),
Value.createStringValue("b"),
Value.createStringValue("c"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testReplace1() {
String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue(""),
"p1", Value.createStringValue(""),
"p2", Value.createStringValue("a"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testReplace2() {
String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("abc"),
"p1", Value.createStringValue(""),
"p2", Value.createStringValue("xyz"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abc").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testReplace3() {
String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue(""),
"p1", Value.createStringValue(""),
"p2", Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testReplace4() {
String sql = "SELECT REPLACE(@p0, @p1, @p2) AS ColA";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p2", Value.createStringValue(""));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTrim1() {
String sql = "SELECT trim(@p0)";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createStringValue(" a b c "));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("a b c").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTrim2() {
String sql = "SELECT trim(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("abxyzab"), "p1", Value.createStringValue("ab"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("xyz").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTrim3() {
String sql = "SELECT trim(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testLTrim1() {
String sql = "SELECT ltrim(@p0)";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createStringValue(" a b c "));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("a b c ").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testLTrim2() {
String sql = "SELECT ltrim(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("abxyzab"), "p1", Value.createStringValue("ab"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("xyzab").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testLTrim3() {
String sql = "SELECT ltrim(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testRTrim1() {
String sql = "SELECT rtrim(@p0)";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createStringValue(" a b c "));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(" a b c").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testRTrim2() {
String sql = "SELECT rtrim(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("abxyzab"), "p1", Value.createStringValue("ab"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("abxyz").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testRTrim3() {
String sql = "SELECT rtrim(@p0, @p1)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING),
"p1", Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field1", FieldType.STRING).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((String) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("https:
public void testCastBytesToString1() {
String sql = "SELECT CAST(@p0 AS STRING)";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createBytesValue(ByteString.copyFromUtf8("`")));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("`").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCastBytesToString2() {
String sql = "SELECT CAST(b'b' AS STRING)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("b").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("https:
public void testCastBytesToStringFromTable() {
String sql = "SELECT CAST(bytes_col AS STRING) FROM table_all_types";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("1").build(),
Row.withSchema(schema).addValues("2").build(),
Row.withSchema(schema).addValues("3").build(),
Row.withSchema(schema).addValues("4").build(),
Row.withSchema(schema).addValues("5").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCastStringToTS() {
String sql = "SELECT CAST('2019-01-15 13:21:03' AS TIMESTAMP)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addDateTimeField("field_1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(parseTimestampWithUTCTimeZone("2019-01-15 13:21:03"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCastStringToString() {
String sql = "SELECT CAST(@p0 AS STRING)";
ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createStringValue(""));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCastStringToInt64() {
String sql = "SELECT CAST(@p0 AS INT64)";
ImmutableMap<String, Value> params = ImmutableMap.of("p0", Value.createStringValue("123"));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(123L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectConstant() {
String sql = "SELECT 'hi'";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("hi").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("Does not support DATE_ADD.")
public void testDateAddWithParameter() {
String sql =
"SELECT "
+ "DATE_ADD(@p0, INTERVAL @p1 DAY), "
+ "DATE_ADD(@p2, INTERVAL @p3 DAY), "
+ "DATE_ADD(@p4, INTERVAL @p5 YEAR), "
+ "DATE_ADD(@p6, INTERVAL @p7 DAY), "
+ "DATE_ADD(@p8, INTERVAL @p9 MONTH)";
ImmutableMap<String, Value> params =
ImmutableMap.<String, Value>builder()
.put("p0", Value.createDateValue(0))
.put("p1", Value.createInt64Value(2L))
.put("p2", parseDateToValue("2019-01-01"))
.put("p3", Value.createInt64Value(2L))
.put("p4", Value.createSimpleNullValue(TypeKind.TYPE_DATE))
.put("p5", Value.createInt64Value(1L))
.put("p6", parseDateToValue("2000-02-29"))
.put("p7", Value.createInt64Value(-365L))
.put("p8", parseDateToValue("1999-03-31"))
.put("p9", Value.createInt64Value(-1L))
.build();
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addDateTimeField("field1")
.addDateTimeField("field2")
.addNullableField("field3", DATETIME)
.addDateTimeField("field4")
.addDateTimeField("field5")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
parseDate("1970-01-03"),
parseDate("2019-01-03"),
null,
parseDate("1999-03-01"),
parseDate("1999-02-28"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("Does not support TIME_ADD.")
public void testTimeAddWithParameter() {
String sql = "SELECT TIME_ADD(@p0, INTERVAL @p1 SECOND)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", parseTimeToValue("12:13:14.123"),
"p1", Value.createInt64Value(1L));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addDateTimeField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues(parseTime("12:13:15.123")).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampAddWithParameter1() {
String sql = "SELECT TIMESTAMP_ADD(@p0, INTERVAL @p1 MILLISECOND)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", parseTimestampWithTZToValue("2001-01-01 00:00:00+00"),
"p1", Value.createInt64Value(1L));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addDateTimeField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(parseTimestampWithTimeZone("2001-01-01 00:00:00.001+00"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTimestampAddWithParameter2() {
String sql = "SELECT TIMESTAMP_ADD(@p0, INTERVAL @p1 MINUTE)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", parseTimestampWithTZToValue("2008-12-25 15:30:00+07:30"),
"p1", Value.createInt64Value(10L));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addDateTimeField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(parseTimestampWithTimeZone("2008-12-25 15:40:00+07:30"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("[BEAM-8593] ZetaSQL does not support Map type")
public void testSelectFromTableWithMap() {
String sql = "SELECT row_field FROM table_with_map";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema rowSchema = Schema.builder().addInt64Field("row_id").addStringField("data").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(Schema.builder().addRowField("row_field", rowSchema).build())
.addValues(Row.withSchema(rowSchema).addValues(1L, "data1").build())
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSubQuery() {
String sql = "select sum(Key) from KeyValue\n" + "group by (select Key)";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(UnsupportedOperationException.class);
thrown.expectMessage("Does not support sub-queries");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testSubstr() {
String sql = "SELECT substr(@p0, @p1, @p2)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("abc"),
"p1", Value.createInt64Value(-2L),
"p2", Value.createInt64Value(1L));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field1").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("b").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSubstrWithLargeValueExpectException() {
String sql = "SELECT substr(@p0, @p1, @p2)";
ImmutableMap<String, Value> params =
ImmutableMap.of(
"p0", Value.createStringValue("abc"),
"p1", Value.createInt64Value(Integer.MAX_VALUE + 1L),
"p2", Value.createInt64Value(Integer.MIN_VALUE - 1L));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
thrown.expect(RuntimeException.class);
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectAll() {
String sql = "SELECT ALL Key, Value FROM KeyValue;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").addStringField("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(14L, "KeyValue234").build(),
Row.withSchema(schema).addValues(15L, "KeyValue235").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectDistinct() {
String sql = "SELECT DISTINCT Key FROM aggregate_test_table;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(1L).build(),
Row.withSchema(schema).addValues(2L).build(),
Row.withSchema(schema).addValues(3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectDistinct2() {
String sql =
"SELECT DISTINCT val.BYTES\n"
+ "from (select b\"BYTES\" BYTES union all\n"
+ " select b\"bytes\" union all\n"
+ " select b\"ByTeS\") val";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addByteArrayField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("BYTES".getBytes(StandardCharsets.UTF_8)).build(),
Row.withSchema(schema).addValues("ByTeS".getBytes(StandardCharsets.UTF_8)).build(),
Row.withSchema(schema).addValues("bytes".getBytes(StandardCharsets.UTF_8)).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectBytes() {
String sql = "SELECT b\"ByTes\"";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addByteArrayField("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("ByTes".getBytes(StandardCharsets.UTF_8)).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectExcept() {
String sql = "SELECT * EXCEPT (Key, ts) FROM KeyValue;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field2").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues("KeyValue234").build(),
Row.withSchema(schema).addValues("KeyValue235").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSelectReplace() {
String sql =
"WITH orders AS\n"
+ " (SELECT 5 as order_id,\n"
+ " \"sprocket\" as item_name,\n"
+ " 200 as quantity)\n"
+ "SELECT * REPLACE (\"widget\" AS item_name)\n"
+ "FROM orders";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("field1")
.addStringField("field2")
.addInt64Field("field3")
.build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues(5L, "widget", 200L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testUnionAllBasic() {
String sql =
"SELECT row_id FROM table_all_types UNION ALL SELECT row_id FROM table_all_types_2";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue(1L).build(),
Row.withSchema(schema).addValue(2L).build(),
Row.withSchema(schema).addValue(3L).build(),
Row.withSchema(schema).addValue(4L).build(),
Row.withSchema(schema).addValue(5L).build(),
Row.withSchema(schema).addValue(6L).build(),
Row.withSchema(schema).addValue(7L).build(),
Row.withSchema(schema).addValue(8L).build(),
Row.withSchema(schema).addValue(9L).build(),
Row.withSchema(schema).addValue(10L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testAVGWithLongInput() {
String sql = "SELECT AVG(f_int_1) FROM aggregate_test_table;";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
thrown.expect(RuntimeException.class);
thrown.expectMessage(
"AVG(LONG) is not supported. You might want to use AVG(CAST(expression AS DOUBLE).");
zetaSQLQueryPlanner.convertToBeamRel(sql);
}
@Test
public void testReverseString() {
String sql = "SELECT REVERSE('abc');";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addStringField("field2").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues("cba").build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCharLength() {
String sql = "SELECT CHAR_LENGTH('abc');";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field").build();
PAssert.that(stream).containsInAnyOrder(Row.withSchema(schema).addValues(3L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testCharLengthNull() {
String sql = "SELECT CHAR_LENGTH(@p0);";
ImmutableMap<String, Value> params =
ImmutableMap.of("p0", Value.createSimpleNullValue(TypeKind.TYPE_STRING));
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addNullableField("field", FieldType.INT64).build();
PAssert.that(stream)
.containsInAnyOrder(Row.withSchema(schema).addValues((Object) null).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testTumbleAsTVF() {
String sql =
"select Key, Value, ts, window_start, window_end from "
+ "TUMBLE((select * from KeyValue), descriptor(ts), 'INTERVAL 1 SECOND')";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
ImmutableMap<String, Value> params = ImmutableMap.of();
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addInt64Field("Key")
.addStringField("Value")
.addDateTimeField("ts")
.addDateTimeField("window_start")
.addDateTimeField("window_end")
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema)
.addValues(
14L,
"KeyValue234",
DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:06"),
DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:06"),
DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:07"))
.build(),
Row.withSchema(schema)
.addValues(
15L,
"KeyValue235",
DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:07"),
DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01 21:26:07"),
DateTimeUtils.parseTimestampWithUTCTimeZone("2018-07-01T21:26:08"))
.build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testIsNullTrueFalse() {
String sql =
"WITH Src AS (\n"
+ " SELECT NULL as data UNION ALL\n"
+ " SELECT TRUE UNION ALL\n"
+ " SELECT FALSE\n"
+ ")\n"
+ "SELECT\n"
+ " data IS NULL as isnull,\n"
+ " data IS NOT NULL as isnotnull,\n"
+ " data IS TRUE as istrue,\n"
+ " data IS NOT TRUE as isnottrue,\n"
+ " data IS FALSE as isfalse,\n"
+ " data IS NOT FALSE as isnotfalse\n"
+ "FROM Src\n";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
ImmutableMap<String, Value> params = ImmutableMap.of();
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql, params);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema =
Schema.builder()
.addField("isnull", FieldType.BOOLEAN)
.addField("isnotnull", FieldType.BOOLEAN)
.addField("istrue", FieldType.BOOLEAN)
.addField("isnottrue", FieldType.BOOLEAN)
.addField("isfalse", FieldType.BOOLEAN)
.addField("isnotfalse", FieldType.BOOLEAN)
.build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(true, false, false, true, false, true).build(),
Row.withSchema(schema).addValues(false, true, true, false, false, true).build(),
Row.withSchema(schema).addValues(false, true, false, true, true, false).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testZetaSQLBitOr() {
String sql = "SELECT BIT_OR(row_id) FROM table_all_types GROUP BY bool_col";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValues(3L).build(),
Row.withSchema(schema).addValue(7L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
@Ignore("NULL values don't work correctly. (https:
public void testZetaSQLBitAnd() {
String sql = "SELECT BIT_AND(row_id) FROM table_all_types GROUP BY bool_col";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
final Schema schema = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(schema).addValue(1L).build(),
Row.withSchema(schema).addValue(0L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
@Test
public void testSimpleTableName() {
String sql = "SELECT Key FROM KeyValue";
ZetaSQLQueryPlanner zetaSQLQueryPlanner = new ZetaSQLQueryPlanner(config);
BeamRelNode beamRelNode = zetaSQLQueryPlanner.convertToBeamRel(sql);
PCollection<Row> stream = BeamSqlRelUtils.toPCollection(pipeline, beamRelNode);
Schema singleField = Schema.builder().addInt64Field("field1").build();
PAssert.that(stream)
.containsInAnyOrder(
Row.withSchema(singleField).addValues(14L).build(),
Row.withSchema(singleField).addValues(15L).build());
pipeline.run().waitUntilFinish(Duration.standardMinutes(PIPELINE_EXECUTION_WAITTIME_MINUTES));
}
} |
We should get this error for both L:491 and L:492, right? | public void testTableNegativeCases() {
CompileResult compileResult = BCompileUtil.compile("test-src/types/table/table-negative.bal");
int index = 0;
validateError(compileResult, index++, "unknown type 'CusTable'",
15, 1);
validateError(compileResult, index++, "table key specifier mismatch. expected: '[id]' " +
"but found '[id, firstName]'", 20, 28);
validateError(compileResult, index++, "table key specifier mismatch with key constraint. " +
"expected: '[string]' fields but key specifier is empty", 25, 20);
validateError(compileResult, index++, "table key specifier '[age]' does not match with " +
"key constraint type '[string]'", 30, 26);
validateError(compileResult, index++, "table key specifier mismatch. expected: '[id]' but " +
"found '[address]'", 35, 44);
validateError(compileResult, index++, "member access is not supported for keyless table " +
"'customerTable'", 45, 21);
validateError(compileResult, index++, "invalid constraint type. expected subtype of " +
"'map<any|error>' but found 'int'", 47, 7);
validateError(compileResult, index++, "invalid member access with 'map': member access with " +
"multi-key expression is only allowed with subtypes of 'table'", 52, 13);
validateError(compileResult, index++, "field 'name' used in key specifier must be a readonly " +
"field", 62, 34);
validateError(compileResult, index++, "field 'name' used in key specifier must be a required " +
"field", 75, 28);
validateError(compileResult, index++, "value expression of key specifier 'id' must be a " +
"constant expression", 82, 41);
validateError(compileResult, index++, "member access is not supported for keyless table " +
"'keylessCusTab'", 87, 27);
validateError(compileResult, index++, "value expression of key specifier 'id' must be a " +
"constant expression", 90, 33);
validateError(compileResult, index++, "incompatible types: expected 'table<Customer> " +
"key<string>', found 'table<Customer> key<int>'", 95, 56);
validateError(compileResult, index++, "field name 'no' used in key specifier is not " +
"found in table constraint type 'record {| int id; string name; string lname?; " +
"string address?; |}'", 102, 21);
validateError(compileResult, index++, "field 'address' used in key specifier must be a " +
"readonly field", 108, 21);
validateError(compileResult, index++, "table with constraint of type map cannot have key " +
"specifier or key type constraint", 114, 21);
validateError(compileResult, index++, "table with constraint of type map cannot have key " +
"specifier or key type constraint", 120, 21);
validateError(compileResult, index++, "cannot infer the member type from table constructor; " +
"no values are provided in table constructor", 128, 25);
validateError(compileResult, index++, "incompatible types: expected 'Customer', found 'Customer?'",
135, 25);
validateError(compileResult, index++, "incompatible types: expected 'User', found '(User|Customer)?'",
141, 17);
validateError(compileResult, index++, "incompatible types: expected 'Customer', found 'Customer?'",
148, 25);
validateError(compileResult, index++, "field 'name' used in key specifier must be a readonly field",
156, 36);
validateError(compileResult, index++, "invalid type 'k' for field 'Row' used in key specifier, " +
"expected sub type of anydata", 169, 12);
validateError(compileResult, index++, "value expression of key specifier 'k' must be a " +
"constant expression", 170, 5);
validateError(compileResult, index++, "value expression of key specifier 'k' must be a " +
"constant expression", 182, 5);
validateError(compileResult, index++, "value expression of key specifier 'm' must be a " +
"constant expression", 188, 5);
validateError(compileResult, index++, "invalid constraint type. expected subtype of " +
"'map<any|error>' but found 'any'", 191, 25);
validateError(compileResult, index++, "invalid constraint type. expected subtype of " +
"'map<any|error>' but found 'any'", 194, 14);
validateError(compileResult, index++, "field name 'id' used in key specifier is not " +
"found in table constraint type 'Person'", 197, 19);
validateError(compileResult, index++, "field name 'invalidField' used in key specifier " +
"is not found in table constraint type 'Person'", 198, 19);
validateError(compileResult, index++, "table key specifier '[leaves]' does not match " +
"with key constraint type '[EmployeeId]'", 211, 47);
validateError(compileResult, index++, "table key specifier mismatch with key constraint. " +
"expected: '[string, string]' fields but found '[firstname]'", 213, 47);
validateError(compileResult, index++, "field name 'firstname' used in key specifier " +
"is not found in table constraint type 'CustomerDetail'", 230, 35);
validateError(compileResult, index++, "value expression of key specifier 'id' must be " +
"a constant expression", 237, 9);
validateError(compileResult, index++, "incompatible types: expected 'table<record {| string name?; |}>',"
+ " found 'table<record {| (string|int|boolean) name?; (int|boolean)...; |}>'", 254, 41);
validateError(compileResult, index++, "incompatible types: expected 'table<record {| string name?; |}>'," +
" found 'table<record {| (string|int) name?; int...; |}>'", 263, 41);
validateError(compileResult, index++, "incompatible types: expected 'table<record {| (string|int) name?; |}>'" +
", found 'table<record {| (string|int) name?; int...; |}>'",
264, 45);
validateError(compileResult, index++, "incompatible types: expected 'int'," +
" found 'table<record {| (int|string) a; |}>'", 276, 13);
validateError(compileResult, index++, "incompatible types: expected 'int'," +
" found 'table<record {| int i; int j?; never k?; string l?; never...; |}>'", 291, 13);
validateError(compileResult, index++, "incompatible types: expected 'int'," +
" found 'table<record {| (anydata|error) a; |}>'", 301, 13);
validateError(compileResult, index++, "incompatible types: expected 'int'," +
" found 'table<record {| (any|error) a; |}>'", 311, 13);
validateError(compileResult, index++, "incompatible types: expected 'int'," +
" found 'table<record {| (0|1|2|3) a; |}>'", 324, 13);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl1'", 334, 9);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl2'", 340, 9);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl3'", 346, 9);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl4'", 352, 9);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl5'", 358, 9);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl6'", 364, 9);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl7'", 370, 9);
validateError(compileResult, index++, "cannot update 'table<Customer>' with member access expression", 378, 5);
validateError(compileResult, index++, "cannot update 'table<Customer>' with member access expression", 384, 5);
validateError(compileResult, index++, "cannot update 'table<record {| string name?; |}>' with " +
"member access expression", 390, 5);
validateError(compileResult, index++, "cannot update 'table<record {| string name?; anydata...; |}>' with " +
"member access expression", 396, 5);
validateError(compileResult, index++, "cannot update 'table<(Customer & readonly)> & readonly' with " +
"member access expression", 402, 5);
validateError(compileResult, index++, "cannot update 'table<(record {| string name?; |} & readonly)> & " +
"readonly' with member access expression", 408, 5);
validateError(compileResult, index++, "cannot update 'table<(User|Customer)>' with member access " +
"expression", 414, 5);
validateError(compileResult, index++, "incompatible types: expected " +
"'table<record {| int id; string firstName; string lastName; |}>', found 'CustomerEmptyKeyedTbl'",
422, 76);
validateError(compileResult, index++, "incompatible types: expected 'CustomerTable', " +
"found 'CustomerEmptyKeyedTbl'", 424, 23);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl2'", 433, 9);
validateError(compileResult, index++, "cannot update 'table<Customer>' with member access expression", 434, 5);
validateError(compileResult, index++, "incompatible types: expected 'int', found '[int,int,int]'", 448, 21);
validateError(compileResult, index++, "incompatible types: expected 'int', found '[int,string,string]'",
462, 21);
validateError(compileResult, index++, "incompatible types: expected 'int', found '[int,int,int,int]'", 469, 21);
validateError(compileResult, index++, "incompatible types: expected 'int', found '[int,int,int]'", 478, 21);
validateError(compileResult, index++, "value expression of key specifier 'x' must be a constant expression",
491, 13);
validateError(compileResult, index++, "value expression of key specifier 'x' must be a constant expression",
496, 13);
} | validateError(compileResult, index++, "value expression of key specifier 'x' must be a constant expression", | public void testTableNegativeCases() {
CompileResult compileResult = BCompileUtil.compile("test-src/types/table/table-negative.bal");
int index = 0;
validateError(compileResult, index++, "unknown type 'CusTable'",
15, 1);
validateError(compileResult, index++, "table key specifier mismatch. expected: '[id]' " +
"but found '[id, firstName]'", 20, 28);
validateError(compileResult, index++, "table key specifier mismatch with key constraint. " +
"expected: '[string]' fields but key specifier is empty", 25, 20);
validateError(compileResult, index++, "table key specifier '[age]' does not match with " +
"key constraint type '[string]'", 30, 26);
validateError(compileResult, index++, "table key specifier mismatch. expected: '[id]' but " +
"found '[address]'", 35, 44);
validateError(compileResult, index++, "member access is not supported for keyless table " +
"'customerTable'", 45, 21);
validateError(compileResult, index++, "invalid constraint type. expected subtype of " +
"'map<any|error>' but found 'int'", 47, 7);
validateError(compileResult, index++, "invalid member access with 'map': member access with " +
"multi-key expression is only allowed with subtypes of 'table'", 52, 13);
validateError(compileResult, index++, "field 'name' used in key specifier must be a readonly " +
"field", 62, 34);
validateError(compileResult, index++, "field 'name' used in key specifier must be a required " +
"field", 75, 28);
validateError(compileResult, index++, "value expression of key specifier 'id' must be a " +
"constant expression", 82, 41);
validateError(compileResult, index++, "member access is not supported for keyless table " +
"'keylessCusTab'", 87, 27);
validateError(compileResult, index++, "value expression of key specifier 'id' must be a " +
"constant expression", 90, 33);
validateError(compileResult, index++, "incompatible types: expected 'table<Customer> " +
"key<string>', found 'table<Customer> key<int>'", 95, 56);
validateError(compileResult, index++, "field name 'no' used in key specifier is not " +
"found in table constraint type 'record {| int id; string name; string lname?; " +
"string address?; |}'", 102, 21);
validateError(compileResult, index++, "field 'address' used in key specifier must be a " +
"readonly field", 108, 21);
validateError(compileResult, index++, "table with constraint of type map cannot have key " +
"specifier or key type constraint", 114, 21);
validateError(compileResult, index++, "table with constraint of type map cannot have key " +
"specifier or key type constraint", 120, 21);
validateError(compileResult, index++, "cannot infer the member type from table constructor; " +
"no values are provided in table constructor", 128, 25);
validateError(compileResult, index++, "incompatible types: expected 'Customer', found 'Customer?'",
135, 25);
validateError(compileResult, index++, "incompatible types: expected 'User', found '(User|Customer)?'",
141, 17);
validateError(compileResult, index++, "incompatible types: expected 'Customer', found 'Customer?'",
148, 25);
validateError(compileResult, index++, "field 'name' used in key specifier must be a readonly field",
156, 36);
validateError(compileResult, index++, "invalid type 'k' for field 'Row' used in key specifier, " +
"expected sub type of anydata", 169, 12);
validateError(compileResult, index++, "value expression of key specifier 'k' must be a " +
"constant expression", 170, 5);
validateError(compileResult, index++, "value expression of key specifier 'k' must be a " +
"constant expression", 182, 5);
validateError(compileResult, index++, "value expression of key specifier 'm' must be a " +
"constant expression", 188, 5);
validateError(compileResult, index++, "invalid constraint type. expected subtype of " +
"'map<any|error>' but found 'any'", 191, 25);
validateError(compileResult, index++, "invalid constraint type. expected subtype of " +
"'map<any|error>' but found 'any'", 194, 14);
validateError(compileResult, index++, "field name 'id' used in key specifier is not " +
"found in table constraint type 'Person'", 197, 19);
validateError(compileResult, index++, "field name 'invalidField' used in key specifier " +
"is not found in table constraint type 'Person'", 198, 19);
validateError(compileResult, index++, "table key specifier '[leaves]' does not match " +
"with key constraint type '[EmployeeId]'", 211, 47);
validateError(compileResult, index++, "table key specifier mismatch with key constraint. " +
"expected: '[string, string]' fields but found '[firstname]'", 213, 47);
validateError(compileResult, index++, "field name 'firstname' used in key specifier " +
"is not found in table constraint type 'CustomerDetail'", 230, 35);
validateError(compileResult, index++, "value expression of key specifier 'id' must be " +
"a constant expression", 237, 9);
validateError(compileResult, index++, "value expression of key specifier 'id' must be " +
"a constant expression", 240, 9);
validateError(compileResult, index++, "incompatible types: expected 'table<record {| string name?; |}>',"
+ " found 'table<record {| (string|int|boolean) name?; (int|boolean)...; |}>'", 254, 41);
validateError(compileResult, index++, "incompatible types: expected 'table<record {| string name?; |}>'," +
" found 'table<record {| (string|int) name?; int...; |}>'", 263, 41);
validateError(compileResult, index++, "incompatible types: expected 'table<record {| (string|int) name?; |}>'" +
", found 'table<record {| (string|int) name?; int...; |}>'",
264, 45);
validateError(compileResult, index++, "incompatible types: expected 'int'," +
" found 'table<record {| (int|string) a; |}>'", 276, 13);
validateError(compileResult, index++, "incompatible types: expected 'int'," +
" found 'table<record {| int i; int j?; never k?; string l?; never...; |}>'", 291, 13);
validateError(compileResult, index++, "incompatible types: expected 'int'," +
" found 'table<record {| (anydata|error) a; |}>'", 301, 13);
validateError(compileResult, index++, "incompatible types: expected 'int'," +
" found 'table<record {| (any|error) a; |}>'", 311, 13);
validateError(compileResult, index++, "incompatible types: expected 'int'," +
" found 'table<record {| (0|1|2|3) a; |}>'", 324, 13);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl1'", 334, 9);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl2'", 340, 9);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl3'", 346, 9);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl4'", 352, 9);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl5'", 358, 9);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl6'", 364, 9);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl7'", 370, 9);
validateError(compileResult, index++, "cannot update 'table<Customer>' with member access expression", 378, 5);
validateError(compileResult, index++, "cannot update 'table<Customer>' with member access expression", 384, 5);
validateError(compileResult, index++, "cannot update 'table<record {| string name?; |}>' with " +
"member access expression", 390, 5);
validateError(compileResult, index++, "cannot update 'table<record {| string name?; anydata...; |}>' with " +
"member access expression", 396, 5);
validateError(compileResult, index++, "cannot update 'table<(Customer & readonly)> & readonly' with " +
"member access expression", 402, 5);
validateError(compileResult, index++, "cannot update 'table<(record {| string name?; |} & readonly)> & " +
"readonly' with member access expression", 408, 5);
validateError(compileResult, index++, "cannot update 'table<(User|Customer)>' with member access " +
"expression", 414, 5);
validateError(compileResult, index++, "incompatible types: expected " +
"'table<record {| int id; string firstName; string lastName; |}>', found 'CustomerEmptyKeyedTbl'",
422, 76);
validateError(compileResult, index++, "incompatible types: expected 'CustomerTable', " +
"found 'CustomerEmptyKeyedTbl'", 424, 23);
validateError(compileResult, index++, "member access is not supported for keyless table 'tbl2'", 433, 9);
validateError(compileResult, index++, "cannot update 'table<Customer>' with member access expression", 434, 5);
validateError(compileResult, index++, "incompatible types: expected 'int', found '[int,int,int]'", 448, 21);
validateError(compileResult, index++, "incompatible types: expected 'int', found '[int,string,string]'",
462, 21);
validateError(compileResult, index++, "incompatible types: expected 'int', found '[int,int,int,int]'", 469, 21);
validateError(compileResult, index++, "incompatible types: expected 'int', found '[int,int,int]'", 478, 21);
validateError(compileResult, index++, "value expression of key specifier 'x' must be a constant expression",
491, 5);
validateError(compileResult, index++, "value expression of key specifier 'x' must be a constant expression",
492, 5);
validateError(compileResult, index++, "value expression of key specifier 'x' must be a constant expression",
496, 5);
validateError(compileResult, index++, "value expression of key specifier 'x' must be a constant expression",
497, 5);
validateError(compileResult, index++, "value expression of key specifier 'x' must be a constant expression",
509, 5);
validateError(compileResult, index++, "value expression of key specifier 'y' must be a constant expression",
509, 5);
validateError(compileResult, index++, "value expression of key specifier 'x' must be a constant expression",
510, 5);
validateError(compileResult, index++, "value expression of key specifier 'y' must be a constant expression",
510, 5);
validateError(compileResult, index++, "value expression of key specifier 'z' must be a constant expression",
510, 5);
Assert.assertEquals(compileResult.getErrorCount(), index);
} | class TableNegativeTest {
@Test
@Test
public void testTableKeyViolations() {
CompileResult compileResult = BCompileUtil.compile("test-src/types/table/table_key_violations.bal");
Assert.assertEquals(compileResult.getErrorCount(), 9);
int index = 0;
validateError(compileResult, index++, "duplicate key found in table row key('id') : '13'",
9, 9);
validateError(compileResult, index++, "duplicate key found in table row key('id, firstName') : '13, Foo'",
15, 9);
validateError(compileResult, index++, "duplicate key found in table row key('id') "
+ ": 'BLangXMLElementLiteral: <BLangXMLQName: () id> </BLangXMLQName: () id> [][123]'",
45, 9);
validateError(compileResult, index++, "duplicate key found in table row key('id') : " +
"'BLangXMLElementLiteral: <BLangXMLQName: (p) id> </BLangXMLQName: (p) id> " +
"[BLangXMLAttribute: BLangXMLQName: (xmlns) p=BLangXMLQuotedString: (DOUBLE_QUOTE) " +
"[http:
"[Contents], BLangXMLElementLiteral: <BLangXMLQName: (p) empId> " +
"</BLangXMLQName: (p) empId> [][5005]]'",
54, 9);
validateError(compileResult, index++, "duplicate key found in table row key('firstName') : '<string> " +
"(name is string && ! invalid?(BLangStringTemplateLiteral: [Hello , name, !!!]):James)'",
64, 9);
validateError(compileResult, index++, "duplicate key found in table row key('id') : '[5005, 5006]'",
76, 5);
validateError(compileResult, index++, "duplicate key found in table row key('id') : ' '",
102, 9);
validateError(compileResult, index++, "duplicate key found in table row key('id') : " +
"'<(byte[] & readonly)> (base16 `5A`)'",
128, 9);
validateError(compileResult, index, "duplicate key found in table row key('id') : 'ID2'",
136, 9);
}
@Test
public void testAnyTypedTableWithKeySpecifiers() {
CompileResult compileResult = BCompileUtil.compile("test-src/types/table/table-value-any-negative.bal");
int index = 0;
validateError(compileResult, index++,
"key specifier not allowed when the target type is any", 18, 20);
validateError(compileResult, index++,
"key specifier not allowed when the target type is any", 25, 15);
Assert.assertEquals(compileResult.getErrorCount(), index);
}
} | class TableNegativeTest {
@Test
public void testTableKeyViolations() {
CompileResult compileResult = BCompileUtil.compile("test-src/types/table/table_key_violations.bal");
Assert.assertEquals(compileResult.getErrorCount(), 9);
int index = 0;
validateError(compileResult, index++, "duplicate key found in table row key('id') : '13'",
9, 9);
validateError(compileResult, index++, "duplicate key found in table row key('id, firstName') : '13, Foo'",
15, 9);
validateError(compileResult, index++, "duplicate key found in table row key('id') "
+ ": 'BLangXMLElementLiteral: <BLangXMLQName: () id> </BLangXMLQName: () id> [][123]'",
45, 9);
validateError(compileResult, index++, "duplicate key found in table row key('id') : " +
"'BLangXMLElementLiteral: <BLangXMLQName: (p) id> </BLangXMLQName: (p) id> " +
"[BLangXMLAttribute: BLangXMLQName: (xmlns) p=BLangXMLQuotedString: (DOUBLE_QUOTE) " +
"[http:
"[Contents], BLangXMLElementLiteral: <BLangXMLQName: (p) empId> " +
"</BLangXMLQName: (p) empId> [][5005]]'",
54, 9);
validateError(compileResult, index++, "duplicate key found in table row key('firstName') : '<string> " +
"(name is string && ! invalid?(BLangStringTemplateLiteral: [Hello , name, !!!]):James)'",
64, 9);
validateError(compileResult, index++, "duplicate key found in table row key('id') : '[5005, 5006]'",
76, 5);
validateError(compileResult, index++, "duplicate key found in table row key('id') : ' '",
102, 9);
validateError(compileResult, index++, "duplicate key found in table row key('id') : " +
"'<(byte[] & readonly)> (base16 `5A`)'",
128, 9);
validateError(compileResult, index, "duplicate key found in table row key('id') : 'ID2'",
136, 9);
}
@Test
public void testAnyTypedTableWithKeySpecifiers() {
CompileResult compileResult = BCompileUtil.compile("test-src/types/table/table-value-any-negative.bal");
int index = 0;
validateError(compileResult, index++,
"key specifier not allowed when the target type is any", 18, 20);
validateError(compileResult, index++,
"key specifier not allowed when the target type is any", 25, 15);
Assert.assertEquals(compileResult.getErrorCount(), index);
}
} |
I combined both cases. Earlier we only did the delay in the success case; now we also need to do it in the error case. Since they use the same logic, I combined them into one. | private Mono<Void> run(CancellationToken cancellationToken) {
return Flux.just(this)
.flatMap(value -> this.leaseContainer.getAllLeases())
.collectList()
.flatMap(allLeases -> {
if (cancellationToken.isCancellationRequested()) return Mono.empty();
List<Lease> leasesToTake = this.partitionLoadBalancingStrategy.selectLeasesToTake(allLeases);
if (leasesToTake.size() > 0) {
this.logger.info("Found {} total leases, taking ownership of {}", allLeases.size(), leasesToTake.size());
}
if (cancellationToken.isCancellationRequested()) return Mono.empty();
return Flux.fromIterable(leasesToTake)
.limitRate(1)
.flatMap(lease -> {
if (cancellationToken.isCancellationRequested()) return Mono.empty();
return this.partitionController.addOrUpdateLease(lease);
})
.then();
})
.onErrorResume(throwable -> {
logger.warn("Unexpected exception thrown while trying to acquire available leases", throwable);
return Mono.empty();
})
.then(
Mono.just(this)
.flatMap(value -> {
if (cancellationToken.isCancellationRequested()) {
return Mono.empty();
}
Instant stopTimer = Instant.now().plus(this.leaseAcquireInterval);
return Mono.just(value)
.delayElement(Duration.ofMillis(100), CosmosSchedulers.COSMOS_PARALLEL)
.repeat(() -> {
Instant currentTime = Instant.now();
return !cancellationToken.isCancellationRequested() && currentTime.isBefore(stopTimer);
})
.then();
})
)
.repeat(() -> !cancellationToken.isCancellationRequested())
.then()
.onErrorResume(throwable -> {
logger.info("Partition load balancer task stopped.");
return this.stop();
});
} | .then(); | private Mono<Void> run(CancellationToken cancellationToken) {
return Flux.just(this)
.flatMap(value -> this.leaseContainer.getAllLeases())
.collectList()
.flatMap(allLeases -> {
if (cancellationToken.isCancellationRequested()) return Mono.empty();
List<Lease> leasesToTake = this.partitionLoadBalancingStrategy.selectLeasesToTake(allLeases);
if (leasesToTake.size() > 0) {
this.logger.info("Found {} total leases, taking ownership of {}", allLeases.size(), leasesToTake.size());
}
if (cancellationToken.isCancellationRequested()) return Mono.empty();
return Flux.fromIterable(leasesToTake)
.limitRate(1)
.flatMap(lease -> {
if (cancellationToken.isCancellationRequested()) return Mono.empty();
return this.partitionController.addOrUpdateLease(lease);
})
.then();
})
.onErrorResume(throwable -> {
logger.warn("Unexpected exception thrown while trying to acquire available leases", throwable);
return Mono.empty();
})
.then(
Mono.just(this)
.flatMap(value -> {
if (cancellationToken.isCancellationRequested()) {
return Mono.empty();
}
Instant stopTimer = Instant.now().plus(this.leaseAcquireInterval);
return Mono.just(value)
.delayElement(Duration.ofMillis(100), CosmosSchedulers.COSMOS_PARALLEL)
.repeat(() -> {
Instant currentTime = Instant.now();
return !cancellationToken.isCancellationRequested() && currentTime.isBefore(stopTimer);
})
.then();
})
)
.repeat(() -> !cancellationToken.isCancellationRequested())
.then()
.onErrorResume(throwable -> {
logger.info("Partition load balancer task stopped.");
return this.stop();
});
} | class PartitionLoadBalancerImpl implements PartitionLoadBalancer {
private final Logger logger = LoggerFactory.getLogger(PartitionLoadBalancerImpl.class);
private final PartitionController partitionController;
private final LeaseContainer leaseContainer;
private final PartitionLoadBalancingStrategy partitionLoadBalancingStrategy;
private final Duration leaseAcquireInterval;
private final Scheduler scheduler;
private CancellationTokenSource cancellationTokenSource;
private volatile boolean started;
private final Object lock;
public PartitionLoadBalancerImpl(
PartitionController partitionController,
LeaseContainer leaseContainer,
PartitionLoadBalancingStrategy partitionLoadBalancingStrategy,
Duration leaseAcquireInterval,
Scheduler scheduler) {
checkNotNull(partitionController, "Argument 'partitionController' can not be null");
checkNotNull(leaseContainer, "Argument 'leaseContainer' can not be null");
checkNotNull(partitionLoadBalancingStrategy, "Argument 'partitionLoadBalancingStrategy' can not be null");
checkNotNull(scheduler, "Argument 'scheduler' can not be null");
this.partitionController = partitionController;
this.leaseContainer = leaseContainer;
this.partitionLoadBalancingStrategy = partitionLoadBalancingStrategy;
this.leaseAcquireInterval = leaseAcquireInterval;
this.scheduler = scheduler;
this.started = false;
this.lock = new Object();
}
@Override
public Mono<Void> start() {
synchronized (lock) {
if (this.started) {
throw new IllegalStateException("Partition load balancer already started");
}
this.cancellationTokenSource = new CancellationTokenSource();
this.started = true;
}
return Mono.fromRunnable( () -> {
scheduler.schedule(() -> this.run(this.cancellationTokenSource.getToken()).subscribe());
});
}
@Override
public Mono<Void> stop() {
synchronized (lock) {
this.started = false;
this.cancellationTokenSource.cancel();
}
return this.partitionController.shutdown();
}
@Override
public boolean isRunning() {
return this.started;
}
} | class PartitionLoadBalancerImpl implements PartitionLoadBalancer {
private final Logger logger = LoggerFactory.getLogger(PartitionLoadBalancerImpl.class);
private final PartitionController partitionController;
private final LeaseContainer leaseContainer;
private final PartitionLoadBalancingStrategy partitionLoadBalancingStrategy;
private final Duration leaseAcquireInterval;
private final Scheduler scheduler;
private CancellationTokenSource cancellationTokenSource;
private volatile boolean started;
private final Object lock;
public PartitionLoadBalancerImpl(
PartitionController partitionController,
LeaseContainer leaseContainer,
PartitionLoadBalancingStrategy partitionLoadBalancingStrategy,
Duration leaseAcquireInterval,
Scheduler scheduler) {
checkNotNull(partitionController, "Argument 'partitionController' can not be null");
checkNotNull(leaseContainer, "Argument 'leaseContainer' can not be null");
checkNotNull(partitionLoadBalancingStrategy, "Argument 'partitionLoadBalancingStrategy' can not be null");
checkNotNull(scheduler, "Argument 'scheduler' can not be null");
this.partitionController = partitionController;
this.leaseContainer = leaseContainer;
this.partitionLoadBalancingStrategy = partitionLoadBalancingStrategy;
this.leaseAcquireInterval = leaseAcquireInterval;
this.scheduler = scheduler;
this.started = false;
this.lock = new Object();
}
@Override
public Mono<Void> start() {
synchronized (lock) {
if (this.started) {
throw new IllegalStateException("Partition load balancer already started");
}
this.cancellationTokenSource = new CancellationTokenSource();
this.started = true;
}
return Mono.fromRunnable( () -> {
scheduler.schedule(() -> this.run(this.cancellationTokenSource.getToken()).subscribe());
});
}
@Override
public Mono<Void> stop() {
synchronized (lock) {
this.started = false;
this.cancellationTokenSource.cancel();
}
return this.partitionController.shutdown();
}
@Override
public boolean isRunning() {
return this.started;
}
} |
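The exchange above is about making the post-iteration delay run after both the successful and the failing load-balancing pass. A minimal stand-alone sketch of that Reactor shape (names, timings, and the loop bound are illustrative assumptions, not the SDK's actual implementation): recovering from the error before a shared `then(...)` delay gives both outcomes the same back-off before the next repeat.

```java
import java.time.Duration;

import reactor.core.publisher.Mono;

public class SharedDelaySketch {

    // Illustrative stand-in for one load-balancing pass; it may complete or error.
    static Mono<Void> doOnePass() {
        return Mono.empty();
    }

    // Recover from errors first so the same delay step runs after success and
    // failure alike, then resubscribe to repeat the whole sequence.
    static Mono<Void> runLoop(long extraIterations) {
        return doOnePass()
                .onErrorResume(t -> Mono.empty())          // error path falls through to the shared delay
                .then(Mono.delay(Duration.ofMillis(100)))  // shared back-off for both outcomes
                .then()
                .repeat(extraIterations)
                .then();
    }

    public static void main(String[] args) {
        runLoop(3).block();
    }
}
```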
Yes, it's used in the next commit to persist in-flight data (replaces `notifyBufferReceived`). | public Optional<BufferOrEvent> pollNext() throws IOException, InterruptedException {
Optional<BufferOrEvent> next = inputGate.pollNext();
if (!next.isPresent()) {
return handleEmptyBuffer();
}
BufferOrEvent bufferOrEvent = next.get();
checkState(!barrierHandler.isBlocked(bufferOrEvent.getChannelInfo()));
if (bufferOrEvent.isEvent()) {
handleEvent(bufferOrEvent);
} else {
barrierHandler.processBuffer(bufferOrEvent.getBuffer(), bufferOrEvent.getChannelInfo());
}
return next;
} | barrierHandler.processBuffer(bufferOrEvent.getBuffer(), bufferOrEvent.getChannelInfo()); | public Optional<BufferOrEvent> pollNext() throws IOException, InterruptedException {
Optional<BufferOrEvent> next = inputGate.pollNext();
if (!next.isPresent()) {
return handleEmptyBuffer();
}
BufferOrEvent bufferOrEvent = next.get();
checkState(!barrierHandler.isBlocked(bufferOrEvent.getChannelInfo()));
if (bufferOrEvent.isEvent()) {
handleEvent(bufferOrEvent);
}
return next;
} | class CheckpointedInputGate implements PullingAsyncDataInput<BufferOrEvent>, Closeable {
private final CheckpointBarrierHandler barrierHandler;
/** The gate that the buffer draws its input from. */
private final InputGate inputGate;
private final MailboxExecutor mailboxExecutor;
/** Indicate end of the input. */
private boolean isFinished;
/**
* Creates a new checkpoint stream aligner.
*
* <p>The aligner will allow only alignments that buffer up to the given number of bytes.
* When that number is exceeded, it will stop the alignment and notify the task that the
* checkpoint has been cancelled.
*
* @param inputGate The input gate to draw the buffers and events from.
* @param barrierHandler Handler that controls which channels are blocked.
*/
public CheckpointedInputGate(
InputGate inputGate,
CheckpointBarrierHandler barrierHandler,
MailboxExecutor mailboxExecutor) {
this.inputGate = inputGate;
this.barrierHandler = barrierHandler;
this.mailboxExecutor = mailboxExecutor;
waitForPriorityEvents(inputGate, mailboxExecutor);
}
/**
* Eagerly pulls and processes all priority events. Must be called from task thread.
*
* <p>Basic assumption is that no priority event needs to be handled by the {@link StreamTaskNetworkInput}.
*/
private void processPriorityEvents() throws IOException, InterruptedException {
final boolean hasPriorityEvents = inputGate.getPriorityEventAvailableFuture().isDone();
if (hasPriorityEvents) {
while (pollNext().map(BufferOrEvent::morePriorityEvents).orElse(false)) {
}
}
waitForPriorityEvents(inputGate, mailboxExecutor);
}
private void waitForPriorityEvents(InputGate inputGate, MailboxExecutor mailboxExecutor) {
final CompletableFuture<?> priorityEventAvailableFuture = inputGate.getPriorityEventAvailableFuture();
priorityEventAvailableFuture.thenRun(() -> {
mailboxExecutor.execute(this::processPriorityEvents, "process priority even @ gate %s", inputGate);
});
}
@Override
public CompletableFuture<?> getAvailableFuture() {
return inputGate.getAvailableFuture();
}
@Override
private void handleEvent(BufferOrEvent bufferOrEvent) throws IOException {
if (bufferOrEvent.getEvent().getClass() == CheckpointBarrier.class) {
CheckpointBarrier checkpointBarrier = (CheckpointBarrier) bufferOrEvent.getEvent();
barrierHandler.processBarrier(checkpointBarrier, bufferOrEvent.getChannelInfo());
}
else if (bufferOrEvent.getEvent().getClass() == CancelCheckpointMarker.class) {
barrierHandler.processCancellationBarrier((CancelCheckpointMarker) bufferOrEvent.getEvent());
}
else if (bufferOrEvent.getEvent().getClass() == EndOfPartitionEvent.class) {
barrierHandler.processEndOfPartition();
}
}
public void spillInflightBuffers(
long checkpointId,
int channelIndex,
ChannelStateWriter channelStateWriter) throws IOException {
InputChannel channel = inputGate.getChannel(channelIndex);
if (barrierHandler.hasInflightData(checkpointId, channel.getChannelInfo())) {
channel.spillInflightBuffers(checkpointId, channelStateWriter);
}
}
public CompletableFuture<Void> getAllBarriersReceivedFuture(long checkpointId) {
return barrierHandler.getAllBarriersReceivedFuture(checkpointId);
}
private Optional<BufferOrEvent> handleEmptyBuffer() {
if (inputGate.isFinished()) {
isFinished = true;
}
return Optional.empty();
}
@Override
public boolean isFinished() {
return isFinished;
}
/**
* Cleans up all internally held resources.
*
* @throws IOException Thrown if the cleanup of I/O resources failed.
*/
public void close() throws IOException {
barrierHandler.close();
}
/**
* Gets the ID defining the current pending, or just completed, checkpoint.
*
* @return The ID of the pending of completed checkpoint.
*/
@VisibleForTesting
long getLatestCheckpointId() {
return barrierHandler.getLatestCheckpointId();
}
/**
* Gets the time that the latest alignment took, in nanoseconds.
* If there is currently an alignment in progress, it will return the time spent in the
* current alignment so far.
*
* @return The duration in nanoseconds
*/
@VisibleForTesting
long getAlignmentDurationNanos() {
return barrierHandler.getAlignmentDurationNanos();
}
/**
* @return the time that elapsed, in nanoseconds, between the creation of the latest checkpoint
* and the time when it's first {@link CheckpointBarrier} was received by this {@link InputGate}.
*/
@VisibleForTesting
long getCheckpointStartDelayNanos() {
return barrierHandler.getCheckpointStartDelayNanos();
}
/**
* @return number of underlying input channels.
*/
public int getNumberOfInputChannels() {
return inputGate.getNumberOfInputChannels();
}
@Override
public String toString() {
return barrierHandler.toString();
}
public InputChannel getChannel(int channelIndex) {
return inputGate.getChannel(channelIndex);
}
public List<InputChannelInfo> getChannelInfos() {
return inputGate.getChannelInfos();
}
@VisibleForTesting
CheckpointBarrierHandler getCheckpointBarrierHandler() {
return barrierHandler;
}
} | class CheckpointedInputGate implements PullingAsyncDataInput<BufferOrEvent>, Closeable {
private final CheckpointBarrierHandler barrierHandler;
/** The gate that the buffer draws its input from. */
private final InputGate inputGate;
private final MailboxExecutor mailboxExecutor;
/** Indicate end of the input. */
private boolean isFinished;
/**
* Creates a new checkpoint stream aligner.
*
* <p>The aligner will allow only alignments that buffer up to the given number of bytes.
* When that number is exceeded, it will stop the alignment and notify the task that the
* checkpoint has been cancelled.
*
* @param inputGate The input gate to draw the buffers and events from.
* @param barrierHandler Handler that controls which channels are blocked.
*/
public CheckpointedInputGate(
InputGate inputGate,
CheckpointBarrierHandler barrierHandler,
MailboxExecutor mailboxExecutor) {
this.inputGate = inputGate;
this.barrierHandler = barrierHandler;
this.mailboxExecutor = mailboxExecutor;
waitForPriorityEvents(inputGate, mailboxExecutor);
}
/**
* Eagerly pulls and processes all priority events. Must be called from task thread.
*
* <p>Basic assumption is that no priority event needs to be handled by the {@link StreamTaskNetworkInput}.
*/
private void processPriorityEvents() throws IOException, InterruptedException {
boolean hasPriorityEvent = inputGate.getPriorityEventAvailableFuture().isDone();
while (hasPriorityEvent) {
final Optional<BufferOrEvent> bufferOrEventOpt = pollNext();
checkState(bufferOrEventOpt.isPresent());
final BufferOrEvent bufferOrEvent = bufferOrEventOpt.get();
checkState(bufferOrEvent.hasPriority(), "Should only poll priority events");
hasPriorityEvent = bufferOrEvent.morePriorityEvents();
}
waitForPriorityEvents(inputGate, mailboxExecutor);
}
private void waitForPriorityEvents(InputGate inputGate, MailboxExecutor mailboxExecutor) {
final CompletableFuture<?> priorityEventAvailableFuture = inputGate.getPriorityEventAvailableFuture();
priorityEventAvailableFuture.thenRun(() ->
mailboxExecutor.execute(this::processPriorityEvents, "process priority event @ gate %s", inputGate));
}
@Override
public CompletableFuture<?> getAvailableFuture() {
return inputGate.getAvailableFuture();
}
@Override
private void handleEvent(BufferOrEvent bufferOrEvent) throws IOException {
if (bufferOrEvent.getEvent().getClass() == CheckpointBarrier.class) {
CheckpointBarrier checkpointBarrier = (CheckpointBarrier) bufferOrEvent.getEvent();
barrierHandler.processBarrier(checkpointBarrier, bufferOrEvent.getChannelInfo());
}
else if (bufferOrEvent.getEvent().getClass() == CancelCheckpointMarker.class) {
barrierHandler.processCancellationBarrier((CancelCheckpointMarker) bufferOrEvent.getEvent());
}
else if (bufferOrEvent.getEvent().getClass() == EndOfPartitionEvent.class) {
barrierHandler.processEndOfPartition();
}
}
public CompletableFuture<Void> getAllBarriersReceivedFuture(long checkpointId) {
return barrierHandler.getAllBarriersReceivedFuture(checkpointId);
}
private Optional<BufferOrEvent> handleEmptyBuffer() {
if (inputGate.isFinished()) {
isFinished = true;
}
return Optional.empty();
}
@Override
public boolean isFinished() {
return isFinished;
}
/**
* Cleans up all internally held resources.
*
* @throws IOException Thrown if the cleanup of I/O resources failed.
*/
public void close() throws IOException {
barrierHandler.close();
}
/**
* Gets the ID defining the current pending, or just completed, checkpoint.
*
* @return The ID of the pending of completed checkpoint.
*/
@VisibleForTesting
long getLatestCheckpointId() {
return barrierHandler.getLatestCheckpointId();
}
/**
* Gets the time that the latest alignment took, in nanoseconds.
* If there is currently an alignment in progress, it will return the time spent in the
* current alignment so far.
*
* @return The duration in nanoseconds
*/
@VisibleForTesting
long getAlignmentDurationNanos() {
return barrierHandler.getAlignmentDurationNanos();
}
/**
* @return the time that elapsed, in nanoseconds, between the creation of the latest checkpoint
* and the time when it's first {@link CheckpointBarrier} was received by this {@link InputGate}.
*/
@VisibleForTesting
long getCheckpointStartDelayNanos() {
return barrierHandler.getCheckpointStartDelayNanos();
}
/**
* @return number of underlying input channels.
*/
public int getNumberOfInputChannels() {
return inputGate.getNumberOfInputChannels();
}
@Override
public String toString() {
return barrierHandler.toString();
}
public InputChannel getChannel(int channelIndex) {
return inputGate.getChannel(channelIndex);
}
public List<InputChannelInfo> getChannelInfos() {
return inputGate.getChannelInfos();
}
@VisibleForTesting
CheckpointBarrierHandler getCheckpointBarrierHandler() {
return barrierHandler;
}
} |
```suggestion "Unsupported schema version type: %s", ``` | public Table getTable(String tableName) {
final ObjectIdentifier identifier =
ObjectIdentifier.of(catalogName, databaseName, tableName);
Optional<ContextResolvedTable> table;
if (getSchemaVersion().isPresent()) {
SchemaVersion schemaVersion = getSchemaVersion().get();
if (schemaVersion instanceof FlinkSchemaVersion.TimestampSchemaVersion) {
FlinkSchemaVersion.TimestampSchemaVersion timestampSchemaVersion =
(FlinkSchemaVersion.TimestampSchemaVersion) getSchemaVersion().get();
table = catalogManager.getTable(identifier, timestampSchemaVersion.getTimestamp());
} else {
throw new UnsupportedOperationException(
String.format(
"Unsupported schema version type, the class is %s",
schemaVersion.getClass()));
}
} else {
table = catalogManager.getTable(identifier);
}
return table.map(
lookupResult ->
new CatalogSchemaTable(
lookupResult,
getStatistic(lookupResult, identifier),
isStreamingMode))
.orElse(null);
} | "Unsupported schema version type, the class is %s", | public Table getTable(String tableName) {
final ObjectIdentifier identifier =
ObjectIdentifier.of(catalogName, databaseName, tableName);
Optional<ContextResolvedTable> table;
if (getSchemaVersion().isPresent()) {
SchemaVersion schemaVersion = getSchemaVersion().get();
if (schemaVersion instanceof TimestampSchemaVersion) {
TimestampSchemaVersion timestampSchemaVersion =
(TimestampSchemaVersion) getSchemaVersion().get();
table = catalogManager.getTable(identifier, timestampSchemaVersion.getTimestamp());
} else {
throw new UnsupportedOperationException(
String.format(
"Unsupported schema version type: %s", schemaVersion.getClass()));
}
} else {
table = catalogManager.getTable(identifier);
}
return table.map(
lookupResult ->
new CatalogSchemaTable(
lookupResult,
getStatistic(lookupResult, identifier),
isStreamingMode))
.orElse(null);
} | class DatabaseCalciteSchema extends FlinkSchema {
private final String catalogName;
private final String databaseName;
private final CatalogManager catalogManager;
private final boolean isStreamingMode;
public DatabaseCalciteSchema(
String catalogName,
String databaseName,
CatalogManager catalog,
boolean isStreamingMode) {
this.databaseName = databaseName;
this.catalogName = catalogName;
this.catalogManager = catalog;
this.isStreamingMode = isStreamingMode;
}
@Override
private FlinkStatistic getStatistic(
ContextResolvedTable contextResolvedTable, ObjectIdentifier identifier) {
final ResolvedCatalogBaseTable<?> resolvedBaseTable =
contextResolvedTable.getResolvedTable();
switch (resolvedBaseTable.getTableKind()) {
case TABLE:
return FlinkStatistic.unknown(resolvedBaseTable.getResolvedSchema())
.tableStats(extractTableStats(contextResolvedTable, identifier))
.build();
case VIEW:
default:
return FlinkStatistic.UNKNOWN();
}
}
private TableStats extractTableStats(
ContextResolvedTable lookupResult, ObjectIdentifier identifier) {
if (lookupResult.isTemporary()) {
return TableStats.UNKNOWN;
}
final Catalog catalog = lookupResult.getCatalog().orElseThrow(IllegalStateException::new);
final ObjectPath tablePath = identifier.toObjectPath();
try {
final CatalogTableStatistics tableStatistics = catalog.getTableStatistics(tablePath);
final CatalogColumnStatistics columnStatistics =
catalog.getTableColumnStatistics(tablePath);
return convertToTableStats(tableStatistics, columnStatistics);
} catch (TableNotExistException e) {
throw new ValidationException(
format(
"Could not get statistic for table: [%s, %s, %s]",
identifier.getCatalogName(),
tablePath.getDatabaseName(),
tablePath.getObjectName()),
e);
}
}
@Override
public Set<String> getTableNames() {
return catalogManager.listTables(catalogName, databaseName);
}
@Override
public Schema getSubSchema(String s) {
return null;
}
@Override
public Set<String> getSubSchemaNames() {
return new HashSet<>();
}
@Override
public Expression getExpression(SchemaPlus parentSchema, String name) {
return Schemas.subSchemaExpression(parentSchema, name, getClass());
}
@Override
public boolean isMutable() {
return true;
}
@Override
public DatabaseCalciteSchema copy() {
return new DatabaseCalciteSchema(
catalogName, databaseName, catalogManager, isStreamingMode);
}
} | class DatabaseCalciteSchema extends FlinkSchema {
private final String catalogName;
private final String databaseName;
private final CatalogManager catalogManager;
private final boolean isStreamingMode;
public DatabaseCalciteSchema(
String catalogName,
String databaseName,
CatalogManager catalog,
boolean isStreamingMode) {
this.databaseName = databaseName;
this.catalogName = catalogName;
this.catalogManager = catalog;
this.isStreamingMode = isStreamingMode;
}
@Override
private FlinkStatistic getStatistic(
ContextResolvedTable contextResolvedTable, ObjectIdentifier identifier) {
final ResolvedCatalogBaseTable<?> resolvedBaseTable =
contextResolvedTable.getResolvedTable();
switch (resolvedBaseTable.getTableKind()) {
case TABLE:
return FlinkStatistic.unknown(resolvedBaseTable.getResolvedSchema())
.tableStats(extractTableStats(contextResolvedTable, identifier))
.build();
case VIEW:
default:
return FlinkStatistic.UNKNOWN();
}
}
private TableStats extractTableStats(
ContextResolvedTable lookupResult, ObjectIdentifier identifier) {
if (lookupResult.isTemporary()) {
return TableStats.UNKNOWN;
}
final Catalog catalog = lookupResult.getCatalog().orElseThrow(IllegalStateException::new);
final ObjectPath tablePath = identifier.toObjectPath();
try {
final CatalogTableStatistics tableStatistics = catalog.getTableStatistics(tablePath);
final CatalogColumnStatistics columnStatistics =
catalog.getTableColumnStatistics(tablePath);
return convertToTableStats(tableStatistics, columnStatistics);
} catch (TableNotExistException e) {
throw new ValidationException(
format(
"Could not get statistic for table: [%s, %s, %s]",
identifier.getCatalogName(),
tablePath.getDatabaseName(),
tablePath.getObjectName()),
e);
}
}
@Override
public Set<String> getTableNames() {
return catalogManager.listTables(catalogName, databaseName);
}
@Override
public Schema getSubSchema(String s) {
return null;
}
@Override
public Set<String> getSubSchemaNames() {
return new HashSet<>();
}
@Override
public Expression getExpression(SchemaPlus parentSchema, String name) {
return Schemas.subSchemaExpression(parentSchema, name, getClass());
}
@Override
public boolean isMutable() {
return true;
}
@Override
public DatabaseCalciteSchema copy() {
return new DatabaseCalciteSchema(
catalogName, databaseName, catalogManager, isStreamingMode);
}
} |
Instead of handling build projects inside the catch block, can't we first check for the project root and branch out into the two separate logics (single file and package) based on the output? | public static List<String> getExistingTypeNames(WorkspaceManager workspaceManager, String filePathUri) {
List<String> existingTypeNames = new ArrayList<>();
if (filePathUri == null) {
return existingTypeNames;
}
Path filePathResolved;
try {
URI filePathUriResolved = new URI(filePathUri);
filePathResolved = Path.of(filePathUriResolved);
} catch (InvalidPathException | URISyntaxException e) {
return existingTypeNames;
}
if (workspaceManager != null && workspaceManager.semanticModel(filePathResolved).isPresent()) {
List<Symbol> moduleSymbols = workspaceManager.semanticModel(filePathResolved).get().moduleSymbols();
moduleSymbols.forEach(symbol -> {
if (symbol.getName().isPresent()) {
existingTypeNames.add(symbol.getName().get());
}
});
return existingTypeNames;
}
Project project;
try {
project = SingleFileProject.load(filePathResolved);
List<Symbol> moduleSymbols =
project.currentPackage().getDefaultModule().getCompilation().getSemanticModel().moduleSymbols();
moduleSymbols.forEach(symbol -> {
if (symbol.getName().isPresent()) {
existingTypeNames.add(symbol.getName().get());
}
});
} catch (ProjectException pe) {
Path projectRoot = ProjectUtils.findProjectRoot(filePathResolved);
if (projectRoot != null) {
try {
project = BuildProject.load(projectRoot);
List<Symbol> moduleSymbols = project.currentPackage()
.module(project.documentId(filePathResolved).moduleId())
.getCompilation().getSemanticModel().moduleSymbols();
moduleSymbols.forEach(symbol -> {
if (symbol.getName().isPresent()) {
existingTypeNames.add(symbol.getName().get());
}
});
} catch (ProjectException pe1) {
return existingTypeNames;
}
}
}
return existingTypeNames;
} | } | public static List<String> getExistingTypeNames(WorkspaceManager workspaceManager, String filePathUri) {
List<String> existingTypeNames = new ArrayList<>();
if (filePathUri == null) {
return existingTypeNames;
}
Path filePathResolved;
try {
URI filePathUriResolved = new URI(filePathUri);
filePathResolved = Path.of(filePathUriResolved);
} catch (InvalidPathException | URISyntaxException e) {
return existingTypeNames;
}
if (workspaceManager != null && workspaceManager.semanticModel(filePathResolved).isPresent()) {
List<Symbol> moduleSymbols = workspaceManager.semanticModel(filePathResolved).get().moduleSymbols();
moduleSymbols.forEach(symbol -> {
if (symbol.getName().isPresent()) {
existingTypeNames.add(symbol.getName().get());
}
});
return existingTypeNames;
}
try {
Project project;
List<Symbol> moduleSymbols;
Path projectRoot = ProjectUtils.findProjectRoot(filePathResolved);
if (projectRoot == null) {
project = SingleFileProject.load(filePathResolved);
moduleSymbols =
project.currentPackage().getDefaultModule().getCompilation().getSemanticModel().moduleSymbols();
moduleSymbols.forEach(symbol -> {
if (symbol.getName().isPresent()) {
existingTypeNames.add(symbol.getName().get());
}
});
} else {
project = BuildProject.load(projectRoot);
moduleSymbols = project.currentPackage()
.module(project.documentId(filePathResolved).moduleId())
.getCompilation().getSemanticModel().moduleSymbols();
moduleSymbols.forEach(symbol -> {
if (symbol.getName().isPresent()) {
existingTypeNames.add(symbol.getName().get());
}
});
}
} catch (ProjectException pe) {
return existingTypeNames;
}
return existingTypeNames;
} | class ConverterUtils {
private ConverterUtils() {}
private static final String ARRAY_RECORD_SUFFIX = "Item";
private static final String QUOTED_IDENTIFIER_PREFIX = "'";
private static final String ESCAPE_NUMERIC_PATTERN = "\\b\\d.*";
private static final List<String> KEYWORDS = SyntaxInfo.keywords();
/**
* This method returns the identifiers with special characters.
*
* @param identifier Identifier name.
* @return {@link String} Special characters escaped identifier.
*/
public static String escapeIdentifier(String identifier) {
if (KEYWORDS.stream().anyMatch(identifier::equals)) {
return "'" + identifier;
} else {
if (identifier.startsWith(QUOTED_IDENTIFIER_PREFIX)) {
identifier = identifier.substring(1);
}
identifier = unescapeUnicodeCodepoints(identifier);
identifier = escapeSpecialCharacters(identifier);
if (identifier.matches(ESCAPE_NUMERIC_PATTERN)) {
identifier = "\\" + identifier;
}
return identifier;
}
}
/**
* This method returns existing Types on a module/file(for single file projects).
*
* @param workspaceManager Workspace manager instance
* @param filePathUri FilePath URI of the/a file in a singleFileProject or module
* @return {@link List<String>} List of already existing Types
*/
/**
* This method returns an alternative fieldName if the given filedName is already exist.
*
* @param fieldName Field name of the JSON Object/Array
* @param isArrayField To denote whether given field is an array or not
* @param existingFieldNames The list of already existing field names
* @param updatedFieldNames The list of updated field names
* @return {@link List<String>} List of already existing Types
*/
public static String getAndUpdateFieldNames(String fieldName, boolean isArrayField, List<String> existingFieldNames,
Map<String, String> updatedFieldNames) {
String updatedFieldName = getUpdatedFieldName(fieldName, isArrayField, existingFieldNames, updatedFieldNames);
if (!fieldName.equals(updatedFieldName)) {
updatedFieldNames.put(fieldName, updatedFieldName);
return updatedFieldName;
}
return fieldName;
}
/**
* This method returns the SyntaxToken corresponding to the JsonPrimitive.
*
* @param value JsonPrimitive that has to be classified.
* @return {@link Token} Classified Syntax Token.
*/
public static Token getPrimitiveTypeName(JsonPrimitive value) {
if (value.isString()) {
return AbstractNodeFactory.createToken(SyntaxKind.STRING_KEYWORD);
} else if (value.isBoolean()) {
return AbstractNodeFactory.createToken(SyntaxKind.BOOLEAN_KEYWORD);
} else if (value.isNumber()) {
String strValue = value.getAsNumber().toString();
if (strValue.contains(".")) {
return AbstractNodeFactory.createToken(SyntaxKind.DECIMAL_KEYWORD);
} else {
return AbstractNodeFactory.createToken(SyntaxKind.INT_KEYWORD);
}
}
return AbstractNodeFactory.createToken(SyntaxKind.ANYDATA_KEYWORD);
}
/**
* This method extracts TypeDescriptorNodes within any UnionTypeDescriptorNodes or ParenthesisedTypeDescriptorNode.
*
* @param typeDescNodes List of Union and Parenthesised TypeDescriptorNodes
* @return {@link List<TypeDescriptorNode>} Extracted SimpleNameReferenceNodes.
*/
public static List<TypeDescriptorNode> extractTypeDescriptorNodes(List<TypeDescriptorNode> typeDescNodes) {
List<TypeDescriptorNode> extractedTypeNames = new ArrayList<>();
for (TypeDescriptorNode typeDescNode : typeDescNodes) {
TypeDescriptorNode extractedTypeDescNode = extractParenthesisedTypeDescNode(typeDescNode);
if (extractedTypeDescNode instanceof UnionTypeDescriptorNode) {
List<TypeDescriptorNode> childTypeDescNodes =
List.of(((UnionTypeDescriptorNode) extractedTypeDescNode).leftTypeDesc(),
((UnionTypeDescriptorNode) extractedTypeDescNode).rightTypeDesc());
addIfNotExist(extractedTypeNames, extractTypeDescriptorNodes(childTypeDescNodes));
} else {
addIfNotExist(extractedTypeNames, List.of(extractedTypeDescNode));
}
}
return extractedTypeNames;
}
/**
* This method returns the sorted TypeDescriptorNode list.
*
* @param typeDescriptorNodes List of TypeDescriptorNodes has to be sorted.
* @return {@link List<TypeDescriptorNode>} The sorted TypeDescriptorNode list.
*/
public static List<TypeDescriptorNode> sortTypeDescriptorNodes(List<TypeDescriptorNode> typeDescriptorNodes) {
List<TypeDescriptorNode> nonArrayNodes = typeDescriptorNodes.stream()
.filter(node -> !(node instanceof ArrayTypeDescriptorNode)).collect(Collectors.toList());
List<TypeDescriptorNode> arrayNodes = typeDescriptorNodes.stream()
.filter(node -> (node instanceof ArrayTypeDescriptorNode)).collect(Collectors.toList());
nonArrayNodes.sort(Comparator.comparing(TypeDescriptorNode::toSourceCode));
arrayNodes.sort((node1, node2) -> {
ArrayTypeDescriptorNode arrayNode1 = (ArrayTypeDescriptorNode) node1;
ArrayTypeDescriptorNode arrayNode2 = (ArrayTypeDescriptorNode) node2;
return getNumberOfDimensions(arrayNode1).equals(getNumberOfDimensions(arrayNode2)) ?
(arrayNode1).memberTypeDesc().toSourceCode()
.compareTo((arrayNode2).memberTypeDesc().toSourceCode()) :
getNumberOfDimensions(arrayNode1) - getNumberOfDimensions(arrayNode2);
});
return Stream.concat(nonArrayNodes.stream(), arrayNodes.stream()).collect(Collectors.toList());
}
/**
* This method returns the memberTypeDesc node of an ArrayTypeDescriptorNode.
*
* @param typeDescNode ArrayTypeDescriptorNode for which it has to be extracted.
* @return {@link TypeDescriptorNode} The memberTypeDesc node of the ArrayTypeDescriptor node.
*/
public static TypeDescriptorNode extractArrayTypeDescNode(TypeDescriptorNode typeDescNode) {
if (typeDescNode.kind().equals(SyntaxKind.ARRAY_TYPE_DESC)) {
ArrayTypeDescriptorNode arrayTypeDescNode = (ArrayTypeDescriptorNode) typeDescNode;
return extractArrayTypeDescNode(arrayTypeDescNode.memberTypeDesc());
} else {
return typeDescNode;
}
}
/**
* This method returns a list of TypeDescriptorNodes extracted from a UnionTypeDescriptorNode.
*
* @param typeDescNode UnionTypeDescriptorNode for which that has to be extracted.
* @return {@link List<TypeDescriptorNode>} The list of extracted TypeDescriptorNodes.
*/
public static List<TypeDescriptorNode> extractUnionTypeDescNode(TypeDescriptorNode typeDescNode) {
List<TypeDescriptorNode> extractedTypeDescNodes = new ArrayList<>();
TypeDescriptorNode extractedTypeDescNode = typeDescNode;
if (typeDescNode.kind().equals(SyntaxKind.PARENTHESISED_TYPE_DESC)) {
extractedTypeDescNode = extractParenthesisedTypeDescNode(typeDescNode);
}
if (extractedTypeDescNode.kind().equals(SyntaxKind.UNION_TYPE_DESC)) {
UnionTypeDescriptorNode unionTypeDescNode = (UnionTypeDescriptorNode) extractedTypeDescNode;
TypeDescriptorNode leftTypeDescNode = unionTypeDescNode.leftTypeDesc();
TypeDescriptorNode rightTypeDescNode = unionTypeDescNode.rightTypeDesc();
extractedTypeDescNodes.addAll(extractUnionTypeDescNode(leftTypeDescNode));
extractedTypeDescNodes.addAll(extractUnionTypeDescNode(rightTypeDescNode));
} else {
extractedTypeDescNodes.add(extractedTypeDescNode);
}
return extractedTypeDescNodes;
}
/**
* This method returns the number of dimensions of an ArrayTypeDescriptorNode.
*
* @param arrayNode ArrayTypeDescriptorNode for which the no. of dimensions has to be calculated.
* @return {@link Integer} The total no. of dimensions of the ArrayTypeDescriptorNode.
*/
public static Integer getNumberOfDimensions(ArrayTypeDescriptorNode arrayNode) {
int totalDimensions = arrayNode.dimensions().size();
if (arrayNode.memberTypeDesc() instanceof ArrayTypeDescriptorNode) {
totalDimensions += getNumberOfDimensions((ArrayTypeDescriptorNode) arrayNode.memberTypeDesc());
}
return totalDimensions;
}
private static TypeDescriptorNode extractParenthesisedTypeDescNode(TypeDescriptorNode typeDescNode) {
if (typeDescNode instanceof ParenthesisedTypeDescriptorNode) {
return extractParenthesisedTypeDescNode(((ParenthesisedTypeDescriptorNode) typeDescNode).typedesc());
} else {
return typeDescNode;
}
}
private static void addIfNotExist(List<TypeDescriptorNode> typeDescNodes,
List<TypeDescriptorNode> typeDescNodesToBeInserted) {
for (TypeDescriptorNode typeDescNodeToBeInserted : typeDescNodesToBeInserted) {
if (typeDescNodes.stream().noneMatch(typeDescNode -> typeDescNode.toSourceCode()
.equals(typeDescNodeToBeInserted.toSourceCode()))) {
typeDescNodes.add(typeDescNodeToBeInserted);
}
}
}
private static String getUpdatedFieldName(String fieldName, boolean isArrayField, List<String> existingFieldNames,
Map<String, String> updatedFieldNames) {
if (updatedFieldNames.containsKey(fieldName)) {
return updatedFieldNames.get(fieldName);
}
if (!existingFieldNames.contains(fieldName) && !updatedFieldNames.containsValue(fieldName)) {
return fieldName;
} else {
String extractedFieldName = isArrayField ?
fieldName.substring(0, fieldName.length() - ARRAY_RECORD_SUFFIX.length()) : fieldName;
String[] fieldNameSplit = extractedFieldName.split("_");
String numericSuffix = fieldNameSplit[fieldNameSplit.length - 1];
if (NumberUtils.isParsable(numericSuffix)) {
return getUpdatedFieldName(String.join("_",
Arrays.copyOfRange(fieldNameSplit, 0, fieldNameSplit.length - 1)) + "_" +
String.format("%02d", Integer.parseInt(numericSuffix) + 1) +
(isArrayField ? ARRAY_RECORD_SUFFIX : ""),
isArrayField, existingFieldNames, updatedFieldNames);
} else {
return getUpdatedFieldName(extractedFieldName + "_01" + (isArrayField ? ARRAY_RECORD_SUFFIX : ""),
isArrayField, existingFieldNames, updatedFieldNames);
}
}
}
} | class ConverterUtils {
private ConverterUtils() {}
private static final String ARRAY_RECORD_SUFFIX = "Item";
private static final String QUOTED_IDENTIFIER_PREFIX = "'";
private static final String ESCAPE_NUMERIC_PATTERN = "\\b\\d.*";
private static final List<String> KEYWORDS = SyntaxInfo.keywords();
/**
* This method returns the identifiers with special characters.
*
* @param identifier Identifier name.
* @return {@link String} Special characters escaped identifier.
*/
public static String escapeIdentifier(String identifier) {
if (KEYWORDS.stream().anyMatch(identifier::equals)) {
return "'" + identifier;
} else {
if (identifier.startsWith(QUOTED_IDENTIFIER_PREFIX)) {
identifier = identifier.substring(1);
}
identifier = unescapeUnicodeCodepoints(identifier);
identifier = escapeSpecialCharacters(identifier);
if (identifier.matches(ESCAPE_NUMERIC_PATTERN)) {
identifier = "\\" + identifier;
}
return identifier;
}
}
/**
* This method returns existing Types on a module/file(for single file projects).
*
* @param workspaceManager Workspace manager instance
* @param filePathUri FilePath URI of the/a file in a singleFileProject or module
* @return {@link List<String>} List of already existing Types
*/
/**
* This method returns an alternative fieldName if the given filedName is already exist.
*
* @param fieldName Field name of the JSON Object/Array
* @param isArrayField To denote whether given field is an array or not
* @param existingFieldNames The list of already existing field names
* @param updatedFieldNames The list of updated field names
* @return {@link List<String>} List of already existing Types
*/
public static String getAndUpdateFieldNames(String fieldName, boolean isArrayField, List<String> existingFieldNames,
Map<String, String> updatedFieldNames) {
String updatedFieldName = getUpdatedFieldName(fieldName, isArrayField, existingFieldNames, updatedFieldNames);
if (!fieldName.equals(updatedFieldName)) {
updatedFieldNames.put(fieldName, updatedFieldName);
return updatedFieldName;
}
return fieldName;
}
/**
* This method returns the SyntaxToken corresponding to the JsonPrimitive.
*
* @param value JsonPrimitive that has to be classified.
* @return {@link Token} Classified Syntax Token.
*/
public static Token getPrimitiveTypeName(JsonPrimitive value) {
if (value.isString()) {
return AbstractNodeFactory.createToken(SyntaxKind.STRING_KEYWORD);
} else if (value.isBoolean()) {
return AbstractNodeFactory.createToken(SyntaxKind.BOOLEAN_KEYWORD);
} else if (value.isNumber()) {
String strValue = value.getAsNumber().toString();
if (strValue.contains(".")) {
return AbstractNodeFactory.createToken(SyntaxKind.DECIMAL_KEYWORD);
} else {
return AbstractNodeFactory.createToken(SyntaxKind.INT_KEYWORD);
}
}
return AbstractNodeFactory.createToken(SyntaxKind.ANYDATA_KEYWORD);
}
/**
* This method extracts TypeDescriptorNodes within any UnionTypeDescriptorNodes or ParenthesisedTypeDescriptorNode.
*
* @param typeDescNodes List of Union and Parenthesised TypeDescriptorNodes
* @return {@link List<TypeDescriptorNode>} Extracted SimpleNameReferenceNodes.
*/
public static List<TypeDescriptorNode> extractTypeDescriptorNodes(List<TypeDescriptorNode> typeDescNodes) {
List<TypeDescriptorNode> extractedTypeNames = new ArrayList<>();
for (TypeDescriptorNode typeDescNode : typeDescNodes) {
TypeDescriptorNode extractedTypeDescNode = extractParenthesisedTypeDescNode(typeDescNode);
if (extractedTypeDescNode instanceof UnionTypeDescriptorNode) {
List<TypeDescriptorNode> childTypeDescNodes =
List.of(((UnionTypeDescriptorNode) extractedTypeDescNode).leftTypeDesc(),
((UnionTypeDescriptorNode) extractedTypeDescNode).rightTypeDesc());
addIfNotExist(extractedTypeNames, extractTypeDescriptorNodes(childTypeDescNodes));
} else {
addIfNotExist(extractedTypeNames, List.of(extractedTypeDescNode));
}
}
return extractedTypeNames;
}
/**
* This method returns the sorted TypeDescriptorNode list.
*
* @param typeDescriptorNodes List of TypeDescriptorNodes has to be sorted.
* @return {@link List<TypeDescriptorNode>} The sorted TypeDescriptorNode list.
*/
public static List<TypeDescriptorNode> sortTypeDescriptorNodes(List<TypeDescriptorNode> typeDescriptorNodes) {
List<TypeDescriptorNode> nonArrayNodes = typeDescriptorNodes.stream()
.filter(node -> !(node instanceof ArrayTypeDescriptorNode)).collect(Collectors.toList());
List<TypeDescriptorNode> arrayNodes = typeDescriptorNodes.stream()
.filter(node -> (node instanceof ArrayTypeDescriptorNode)).collect(Collectors.toList());
nonArrayNodes.sort(Comparator.comparing(TypeDescriptorNode::toSourceCode));
arrayNodes.sort((node1, node2) -> {
ArrayTypeDescriptorNode arrayNode1 = (ArrayTypeDescriptorNode) node1;
ArrayTypeDescriptorNode arrayNode2 = (ArrayTypeDescriptorNode) node2;
return getNumberOfDimensions(arrayNode1).equals(getNumberOfDimensions(arrayNode2)) ?
(arrayNode1).memberTypeDesc().toSourceCode()
.compareTo((arrayNode2).memberTypeDesc().toSourceCode()) :
getNumberOfDimensions(arrayNode1) - getNumberOfDimensions(arrayNode2);
});
return Stream.concat(nonArrayNodes.stream(), arrayNodes.stream()).collect(Collectors.toList());
}
/**
* This method returns the memberTypeDesc node of an ArrayTypeDescriptorNode.
*
* @param typeDescNode ArrayTypeDescriptorNode for which it has to be extracted.
* @return {@link TypeDescriptorNode} The memberTypeDesc node of the ArrayTypeDescriptor node.
*/
public static TypeDescriptorNode extractArrayTypeDescNode(TypeDescriptorNode typeDescNode) {
if (typeDescNode.kind().equals(SyntaxKind.ARRAY_TYPE_DESC)) {
ArrayTypeDescriptorNode arrayTypeDescNode = (ArrayTypeDescriptorNode) typeDescNode;
return extractArrayTypeDescNode(arrayTypeDescNode.memberTypeDesc());
} else {
return typeDescNode;
}
}
/**
* This method returns a list of TypeDescriptorNodes extracted from a UnionTypeDescriptorNode.
*
* @param typeDescNode UnionTypeDescriptorNode for which that has to be extracted.
* @return {@link List<TypeDescriptorNode>} The list of extracted TypeDescriptorNodes.
*/
public static List<TypeDescriptorNode> extractUnionTypeDescNode(TypeDescriptorNode typeDescNode) {
List<TypeDescriptorNode> extractedTypeDescNodes = new ArrayList<>();
TypeDescriptorNode extractedTypeDescNode = typeDescNode;
if (typeDescNode.kind().equals(SyntaxKind.PARENTHESISED_TYPE_DESC)) {
extractedTypeDescNode = extractParenthesisedTypeDescNode(typeDescNode);
}
if (extractedTypeDescNode.kind().equals(SyntaxKind.UNION_TYPE_DESC)) {
UnionTypeDescriptorNode unionTypeDescNode = (UnionTypeDescriptorNode) extractedTypeDescNode;
TypeDescriptorNode leftTypeDescNode = unionTypeDescNode.leftTypeDesc();
TypeDescriptorNode rightTypeDescNode = unionTypeDescNode.rightTypeDesc();
extractedTypeDescNodes.addAll(extractUnionTypeDescNode(leftTypeDescNode));
extractedTypeDescNodes.addAll(extractUnionTypeDescNode(rightTypeDescNode));
} else {
extractedTypeDescNodes.add(extractedTypeDescNode);
}
return extractedTypeDescNodes;
}
/**
* This method returns the number of dimensions of an ArrayTypeDescriptorNode.
*
* @param arrayNode ArrayTypeDescriptorNode for which the no. of dimensions has to be calculated.
* @return {@link Integer} The total no. of dimensions of the ArrayTypeDescriptorNode.
*/
public static Integer getNumberOfDimensions(ArrayTypeDescriptorNode arrayNode) {
int totalDimensions = arrayNode.dimensions().size();
if (arrayNode.memberTypeDesc() instanceof ArrayTypeDescriptorNode) {
totalDimensions += getNumberOfDimensions((ArrayTypeDescriptorNode) arrayNode.memberTypeDesc());
}
return totalDimensions;
}
private static TypeDescriptorNode extractParenthesisedTypeDescNode(TypeDescriptorNode typeDescNode) {
if (typeDescNode instanceof ParenthesisedTypeDescriptorNode) {
return extractParenthesisedTypeDescNode(((ParenthesisedTypeDescriptorNode) typeDescNode).typedesc());
} else {
return typeDescNode;
}
}
private static void addIfNotExist(List<TypeDescriptorNode> typeDescNodes,
List<TypeDescriptorNode> typeDescNodesToBeInserted) {
for (TypeDescriptorNode typeDescNodeToBeInserted : typeDescNodesToBeInserted) {
if (typeDescNodes.stream().noneMatch(typeDescNode -> typeDescNode.toSourceCode()
.equals(typeDescNodeToBeInserted.toSourceCode()))) {
typeDescNodes.add(typeDescNodeToBeInserted);
}
}
}
private static String getUpdatedFieldName(String fieldName, boolean isArrayField, List<String> existingFieldNames,
Map<String, String> updatedFieldNames) {
if (updatedFieldNames.containsKey(fieldName)) {
return updatedFieldNames.get(fieldName);
}
if (!existingFieldNames.contains(fieldName) && !updatedFieldNames.containsValue(fieldName)) {
return fieldName;
} else {
String[] fieldNameSplit = fieldName.split("_");
String numericSuffix = fieldNameSplit[fieldNameSplit.length - 1];
if (NumberUtils.isParsable(numericSuffix)) {
return getUpdatedFieldName(String.join("_",
Arrays.copyOfRange(fieldNameSplit, 0, fieldNameSplit.length - 1)) + "_" +
String.format("%02d", Integer.parseInt(numericSuffix) + 1), isArrayField,
existingFieldNames, updatedFieldNames);
} else {
return getUpdatedFieldName(fieldName + "_01", isArrayField, existingFieldNames,
updatedFieldNames);
}
}
}
} |
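The reviewer's structural point above is to decide the project kind up front, by checking for a package root, instead of loading a single-file project and treating the resulting exception as the signal to try a build project. A minimal stand-alone sketch of that shape (every name here is a hypothetical stand-in, not the actual Ballerina project API):

```java
import java.nio.file.Path;
import java.util.Optional;

public class ProjectLoadSketch {

    // Hypothetical stand-ins for the real project-loading API.
    interface Project { }

    static Optional<Path> findProjectRoot(Path file) { return Optional.empty(); }
    static Project loadSingleFileProject(Path file) { return new Project() { }; }
    static Project loadBuildProject(Path root) { return new Project() { }; }

    // Branch on the presence of a project root instead of using the exception
    // from a failed single-file load as control flow.
    static Project load(Path filePath) {
        return findProjectRoot(filePath)
                .<Project>map(ProjectLoadSketch::loadBuildProject)
                .orElseGet(() -> loadSingleFileProject(filePath));
    }
}
```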
Sure, if debug is actually enabled. | void processCodestartDir(final Path sourceDirectory, final Map<String, Object> finalData) {
log.debug(() -> "processing dir: " + sourceDirectory.toString());
final Collection<Path> sources = findSources(sourceDirectory);
for (Path sourcePath : sources) {
final Path relativeSourcePath = sourceDirectory.relativize(sourcePath);
if (!Files.isDirectory(sourcePath)) {
log.debug(() -> "found source file: " + relativeSourcePath.toString());
final String sourceFileName = sourcePath.getFileName().toString();
final Optional<CodestartFileReader> possibleReader = CodestartFileReader.ALL.stream()
.filter(r -> r.matches(sourceFileName))
.findFirst();
final CodestartFileReader reader = possibleReader.orElse(CodestartFileReader.DEFAULT);
log.debug(() -> "using reader: " + reader.getClass().getName());
final String targetFileName = reader.cleanFileName(sourceFileName);
final Path relativeTargetPath = relativeSourcePath.getNameCount() > 1
? relativeSourcePath.getParent().resolve(targetFileName)
: Paths.get(targetFileName);
final boolean hasFileStrategyHandler = getStrategy(relativeTargetPath.toString()).isPresent();
try {
final String processedRelativeTargetPath = CodestartPathProcessor.process(relativeTargetPath.toString(),
finalData);
if (!possibleReader.isPresent() && !hasFileStrategyHandler) {
final Path targetPath = targetDirectory.resolve(processedRelativeTargetPath);
log.debug(() -> "copy static file: " + sourcePath.toString() + "->" + targetPath.toString());
getSelectedDefaultStrategy().copyStaticFile(sourcePath, targetPath);
continue;
}
final Optional<String> content = reader.read(sourceDirectory, relativeSourcePath,
languageName, finalData);
if (content.isPresent()) {
log.debug(() -> "adding file to processing stack: " + sourcePath.toString());
this.files.putIfAbsent(processedRelativeTargetPath, new ArrayList<>());
this.files.get(processedRelativeTargetPath)
.add(new CodestartFile(processedRelativeTargetPath, content.get()));
} else {
log.debug(() -> "ignoring file: " + sourcePath.toString());
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
} | log.debug(() -> "processing dir: " + sourceDirectory.toString()); | void processCodestartDir(final Path sourceDirectory, final Map<String, Object> finalData) {
log.debug("processing dir: %s", sourceDirectory.toString());
final Collection<Path> sources = findSources(sourceDirectory);
for (Path sourcePath : sources) {
final Path relativeSourcePath = sourceDirectory.relativize(sourcePath);
if (!Files.isDirectory(sourcePath)) {
log.debug("found source file: %s", relativeSourcePath);
final String sourceFileName = sourcePath.getFileName().toString();
final Optional<CodestartFileReader> possibleReader = CodestartFileReader.ALL.stream()
.filter(r -> r.matches(sourceFileName))
.findFirst();
final CodestartFileReader reader = possibleReader.orElse(CodestartFileReader.DEFAULT);
log.debug("using reader: %s", reader.getClass().getName());
final String targetFileName = reader.cleanFileName(sourceFileName);
final Path relativeTargetPath = relativeSourcePath.getNameCount() > 1
? relativeSourcePath.getParent().resolve(targetFileName)
: Paths.get(targetFileName);
final boolean hasFileStrategyHandler = getStrategy(relativeTargetPath.toString()).isPresent();
try {
final String processedRelativeTargetPath = CodestartPathProcessor.process(relativeTargetPath.toString(),
finalData);
if (!possibleReader.isPresent() && !hasFileStrategyHandler) {
final Path targetPath = targetDirectory.resolve(processedRelativeTargetPath);
log.debug("copy static file: %s -> %s", sourcePath, targetPath);
getSelectedDefaultStrategy().copyStaticFile(sourcePath, targetPath);
continue;
}
final Optional<String> content = reader.read(sourceDirectory, relativeSourcePath,
languageName, finalData);
if (content.isPresent()) {
log.debug("adding file to processing stack: %s", sourcePath);
this.files.putIfAbsent(processedRelativeTargetPath, new ArrayList<>());
this.files.get(processedRelativeTargetPath)
.add(new CodestartFile(processedRelativeTargetPath, content.get()));
} else {
log.debug("ignoring file: %s", sourcePath);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
}
} | class CodestartProcessor {
private final MessageWriter log;
private final CodestartResourceLoader resourceLoader;
private final String languageName;
private final Path targetDirectory;
private final List<CodestartFileStrategy> strategies;
private final Map<String, Object> data;
private final Map<String, List<CodestartFile>> files = new LinkedHashMap<>();
CodestartProcessor(final MessageWriter log,
final CodestartResourceLoader resourceLoader,
final String languageName,
final Path targetDirectory,
final List<CodestartFileStrategy> strategies,
final Map<String, Object> data) {
this.log = log;
this.resourceLoader = resourceLoader;
this.languageName = languageName;
this.targetDirectory = targetDirectory;
this.strategies = strategies;
this.data = data;
}
void process(final Codestart codestart) throws IOException {
log.debug(() -> "processing codestart '" + codestart.getName() + "'...");
addBuiltinData();
resourceLoader.loadResourceAsPath(codestart.getResourceDir(), p -> {
final Path baseDir = p.resolve(BASE_LANGUAGE);
final Path languageDir = p.resolve(languageName);
final Map<String, Object> finalData = CodestartData.buildCodestartData(codestart, languageName, data);
log.debug(() -> "codestart data: " + finalData.toString());
Stream.of(baseDir, languageDir)
.filter(Files::isDirectory)
.forEach(dirPath -> processCodestartDir(dirPath, finalData));
return null;
});
}
void addBuiltinData() {
data.put("gen-info", Collections.singletonMap("time", System.currentTimeMillis()));
}
private List<Path> findSources(Path sourceDirectory) {
try (final Stream<Path> pathStream = Files.walk(sourceDirectory)) {
return pathStream
.filter(path -> !path.equals(sourceDirectory))
.collect(Collectors.toList());
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
void checkTargetDir() throws IOException {
if (!Files.exists(targetDirectory)) {
boolean mkdirStatus = targetDirectory.toFile().mkdirs();
if (!mkdirStatus) {
throw new IOException("Failed to create the project directory: " + targetDirectory);
}
return;
}
if (!Files.isDirectory(targetDirectory)) {
throw new IOException("Project path needs to point to a directory: " + targetDirectory);
}
final String[] files = targetDirectory.toFile().list();
if (files != null && files.length > 0) {
throw new IOException("You can't create a project when the directory is not empty: " + targetDirectory);
}
}
public void writeFiles() throws IOException {
for (Map.Entry<String, List<CodestartFile>> e : files.entrySet()) {
final String relativePath = e.getKey();
final CodestartFileStrategyHandler strategy = getStrategy(relativePath).orElse(getSelectedDefaultStrategy());
log.debug(() -> "processing file: " + relativePath + " with strategy: " + strategy.name());
strategy.process(targetDirectory, relativePath, e.getValue(), data);
}
}
DefaultCodestartFileStrategyHandler getSelectedDefaultStrategy() {
for (CodestartFileStrategy codestartFileStrategy : strategies) {
if (Objects.equals(codestartFileStrategy.getFilter(), "*")) {
if (codestartFileStrategy.getHandler() instanceof DefaultCodestartFileStrategyHandler) {
return (DefaultCodestartFileStrategyHandler) codestartFileStrategy.getHandler();
}
throw new CodestartDefinitionException(
codestartFileStrategy.getHandler().name() + " can't be used as '*' file strategy");
}
}
return CodestartFileStrategyHandler.DEFAULT_STRATEGY;
}
Optional<CodestartFileStrategyHandler> getStrategy(final String key) {
for (CodestartFileStrategy codestartFileStrategy : strategies) {
if (codestartFileStrategy.test(key)) {
return Optional.of(codestartFileStrategy.getHandler());
}
}
return Optional.empty();
}
static List<CodestartFileStrategy> buildStrategies(Map<String, String> spec) {
final List<CodestartFileStrategy> codestartFileStrategyHandlers = new ArrayList<>(spec.size());
for (Map.Entry<String, String> entry : spec.entrySet()) {
final CodestartFileStrategyHandler handler = CodestartFileStrategyHandler.BY_NAME.get(entry.getValue());
if (handler == null) {
throw new CodestartDefinitionException("ConflictStrategyHandler named '" + entry.getValue()
+ "' not found. Used with filter '" + entry.getKey() + "'");
}
codestartFileStrategyHandlers.add(new CodestartFileStrategy(entry.getKey(), handler));
}
return codestartFileStrategyHandlers;
}
} | class CodestartProcessor {
private final MessageWriter log;
private final CodestartResourceLoader resourceLoader;
private final String languageName;
private final Path targetDirectory;
private final List<CodestartFileStrategy> strategies;
private final Map<String, Object> data;
private final Map<String, List<CodestartFile>> files = new LinkedHashMap<>();
CodestartProcessor(final MessageWriter log,
final CodestartResourceLoader resourceLoader,
final String languageName,
final Path targetDirectory,
final List<CodestartFileStrategy> strategies,
final Map<String, Object> data) {
this.log = log;
this.resourceLoader = resourceLoader;
this.languageName = languageName;
this.targetDirectory = targetDirectory;
this.strategies = strategies;
this.data = data;
}
void process(final Codestart codestart) throws IOException {
log.debug("processing codestart '%s'...", codestart.getName());
addBuiltinData();
resourceLoader.loadResourceAsPath(codestart.getResourceDir(), p -> {
final Path baseDir = p.resolve(BASE_LANGUAGE);
final Path languageDir = p.resolve(languageName);
final Map<String, Object> finalData = CodestartData.buildCodestartData(codestart, languageName, data);
log.debug("codestart data: %s", finalData);
Stream.of(baseDir, languageDir)
.filter(Files::isDirectory)
.forEach(dirPath -> processCodestartDir(dirPath, finalData));
return null;
});
}
void addBuiltinData() {
data.put("gen-info", Collections.singletonMap("time", System.currentTimeMillis()));
}
private List<Path> findSources(Path sourceDirectory) {
try (final Stream<Path> pathStream = Files.walk(sourceDirectory)) {
return pathStream
.filter(path -> !path.equals(sourceDirectory))
.collect(Collectors.toList());
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
void checkTargetDir() throws IOException {
if (!Files.exists(targetDirectory)) {
boolean mkdirStatus = targetDirectory.toFile().mkdirs();
if (!mkdirStatus) {
throw new IOException("Failed to create the project directory: " + targetDirectory);
}
return;
}
if (!Files.isDirectory(targetDirectory)) {
throw new IOException("Project path needs to point to a directory: " + targetDirectory);
}
final String[] files = targetDirectory.toFile().list();
if (files != null && files.length > 0) {
throw new IOException("You can't create a project when the directory is not empty: " + targetDirectory);
}
}
public void writeFiles() throws IOException {
for (Map.Entry<String, List<CodestartFile>> e : files.entrySet()) {
final String relativePath = e.getKey();
final CodestartFileStrategyHandler strategy = getStrategy(relativePath).orElse(getSelectedDefaultStrategy());
log.debug("processing file '%s' with strategy %s", relativePath, strategy.name());
strategy.process(targetDirectory, relativePath, e.getValue(), data);
}
}
DefaultCodestartFileStrategyHandler getSelectedDefaultStrategy() {
for (CodestartFileStrategy codestartFileStrategy : strategies) {
if (Objects.equals(codestartFileStrategy.getFilter(), "*")) {
if (codestartFileStrategy.getHandler() instanceof DefaultCodestartFileStrategyHandler) {
return (DefaultCodestartFileStrategyHandler) codestartFileStrategy.getHandler();
}
throw new CodestartDefinitionException(
codestartFileStrategy.getHandler().name() + " can't be used as '*' file strategy");
}
}
return CodestartFileStrategyHandler.DEFAULT_STRATEGY;
}
Optional<CodestartFileStrategyHandler> getStrategy(final String key) {
for (CodestartFileStrategy codestartFileStrategy : strategies) {
if (codestartFileStrategy.test(key)) {
return Optional.of(codestartFileStrategy.getHandler());
}
}
return Optional.empty();
}
static List<CodestartFileStrategy> buildStrategies(Map<String, String> spec) {
final List<CodestartFileStrategy> codestartFileStrategyHandlers = new ArrayList<>(spec.size());
for (Map.Entry<String, String> entry : spec.entrySet()) {
final CodestartFileStrategyHandler handler = CodestartFileStrategyHandler.BY_NAME.get(entry.getValue());
if (handler == null) {
throw new CodestartDefinitionException("ConflictStrategyHandler named '" + entry.getValue()
+ "' not found. Used with filter '" + entry.getKey() + "'");
}
codestartFileStrategyHandlers.add(new CodestartFileStrategy(entry.getKey(), handler));
}
return codestartFileStrategyHandlers;
}
} |
```suggestion // Reads the next frame when `readyOnConnect` is true or `isReady` is true. ``` | public void onSuccess(WebSocketConnection webSocketConnection, HttpCarbonResponse carbonResponse) {
RetryContext retryConfig = null;
if (WebSocketUtil.hasRetryConfig(webSocketClient)) {
retryConfig = (RetryContext) webSocketClient.getNativeData(WebSocketConstants.RETRY_CONFIG);
}
ObjectValue webSocketConnector;
if (retryConfig != null && !retryConfig.isFirstConnectionMadeSuccessfully()) {
webSocketConnector = BallerinaValues.createObjectValue(HttpConstants.PROTOCOL_HTTP_PKG_ID,
WebSocketConstants.WEBSOCKET_CONNECTOR);
} else {
webSocketConnector = (ObjectValue) webSocketClient.get(WebSocketConstants.CLIENT_CONNECTOR_FIELD);
}
setWebSocketClient(webSocketClient, carbonResponse, webSocketConnection, retryConfig);
WebSocketConnectionInfo connectionInfo = getWebSocketOpenConnectionInfo(webSocketConnection,
webSocketConnector,
webSocketClient, wsService);
clientConnectorListener.setConnectionInfo(connectionInfo);
readNextFrame(readyOnConnect, webSocketClient, webSocketConnection, retryConfig);
if (countDownLatch == null) {
countDownForHandshake(webSocketClient);
} else {
countDownLatch.countDown();
}
WebSocketObservabilityUtil.observeConnection(connectionInfo);
logger.debug(WebSocketConstants.LOG_MESSAGE, CONNECTED_TO, webSocketClient.getStringValue(
WebSocketConstants.CLIENT_URL_CONFIG));
if (retryConfig != null) {
setReconnectContexValue(retryConfig);
}
} | public void onSuccess(WebSocketConnection webSocketConnection, HttpCarbonResponse carbonResponse) {
webSocketClient.set(WebSocketConstants.CLIENT_RESPONSE_FIELD,
HttpUtil.createResponseStruct(carbonResponse));
ObjectValue webSocketConnector = BallerinaValues.createObjectValue(WebSocketConstants.PROTOCOL_HTTP_PKG_ID,
WebSocketConstants.WEBSOCKET_CONNECTOR);
WebSocketConnectionInfo connectionInfo = WebSocketUtil.getWebSocketOpenConnectionInfo(webSocketConnection,
webSocketConnector, webSocketClient, wsService);
WebSocketUtil.populateWebSocketEndpoint(webSocketConnection, webSocketClient);
clientConnectorListener.setConnectionInfo(connectionInfo);
if (readyOnConnect) {
WebSocketUtil.readFirstFrame(webSocketConnection, webSocketClient);
}
countDownLatch.countDown();
WebSocketObservabilityUtil.observeConnection(connectionInfo);
} | class WebSocketClientHandshakeListener implements ClientHandshakeListener {
private final WebSocketService wsService;
private final WebSocketClientConnectorListener clientConnectorListener;
private final boolean readyOnConnect;
private final ObjectValue webSocketClient;
private final CountDownLatch countDownLatch;
private static final Logger logger = LoggerFactory.getLogger(WebSocketClientHandshakeListener.class);
private static final String CONNECTED_TO = "Connected to ";
public WebSocketClientHandshakeListener(ObjectValue webSocketClient,
WebSocketService wsService,
WebSocketClientConnectorListener clientConnectorListener,
boolean readyOnConnect, CountDownLatch countDownLatch) {
this.webSocketClient = webSocketClient;
this.wsService = wsService;
this.clientConnectorListener = clientConnectorListener;
this.readyOnConnect = readyOnConnect;
this.countDownLatch = countDownLatch;
}
@Override
@Override
public void onError(Throwable throwable, HttpCarbonResponse response) {
if (response != null) {
webSocketClient.set(WebSocketConstants.CLIENT_RESPONSE_FIELD, HttpUtil.createResponseStruct(response));
}
ObjectValue webSocketConnector = BallerinaValues.createObjectValue(PROTOCOL_HTTP_PKG_ID,
WebSocketConstants.WEBSOCKET_CONNECTOR);
WebSocketConnectionInfo connectionInfo = getWebSocketOpenConnectionInfo(null,
webSocketConnector, webSocketClient, wsService);
if (countDownLatch != null) {
countDownLatch.countDown();
}
if (WebSocketUtil.hasRetryConfig(webSocketClient) && throwable instanceof IOException &&
WebSocketUtil.reconnect(connectionInfo)) {
return;
}
dispatchOnError(connectionInfo, throwable);
}
private static WebSocketConnectionInfo getWebSocketOpenConnectionInfo(WebSocketConnection webSocketConnection,
ObjectValue webSocketConnector,
ObjectValue webSocketClient,
WebSocketService wsService) {
WebSocketConnectionInfo connectionInfo = new WebSocketConnectionInfo(
wsService, webSocketConnection, webSocketClient);
webSocketConnector.addNativeData(WebSocketConstants.NATIVE_DATA_WEBSOCKET_CONNECTION_INFO, connectionInfo);
webSocketClient.set(WebSocketConstants.CLIENT_CONNECTOR_FIELD, webSocketConnector);
return connectionInfo;
}
private static void setWebSocketClient(ObjectValue webSocketClient, HttpCarbonResponse carbonResponse,
WebSocketConnection webSocketConnection, RetryContext retryConfig) {
webSocketClient.set(WebSocketConstants.CLIENT_RESPONSE_FIELD, HttpUtil.createResponseStruct(carbonResponse));
if (retryConfig != null && retryConfig.isFirstConnectionMadeSuccessfully()) {
webSocketClient.set(WebSocketConstants.LISTENER_ID_FIELD, webSocketConnection.getChannelId());
} else {
WebSocketUtil.populateWebSocketEndpoint(webSocketConnection, webSocketClient);
}
}
/**
* Call the readNextFrame().
*
* @param readyOnConnect ready on connect
* @param webSocketClient webSocket client
* @param webSocketConnection webSocket connection
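* @param retryConfig retry context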
*/
private static void readNextFrame(boolean readyOnConnect, ObjectValue webSocketClient,
WebSocketConnection webSocketConnection, RetryContext retryConfig) {
if (readyOnConnect || ((boolean) (webSocketClient.get(WebSocketConstants.CONNECTOR_IS_READY_FIELD)))) {
if (retryConfig != null && !retryConfig.isFirstConnectionMadeSuccessfully()) {
WebSocketUtil.readFirstFrame(webSocketConnection, webSocketClient);
} else {
webSocketConnection.readNextFrame();
}
}
}
/**
* Count Down the initialised countDownLatch.
*
* @param webSocketClient webSocket client
*/
private static void countDownForHandshake(ObjectValue webSocketClient) {
if (webSocketClient.getNativeData(WebSocketConstants.COUNT_DOWN_LATCH) != null) {
((CountDownLatch) webSocketClient.getNativeData(WebSocketConstants.COUNT_DOWN_LATCH)).countDown();
webSocketClient.addNativeData(WebSocketConstants.COUNT_DOWN_LATCH, null);
}
}
/**
* Set the value into the retryContext.
*
* @param retryConfig retry context
*/
private void setReconnectContexValue(RetryContext retryConfig) {
if (!retryConfig.isFirstConnectionMadeSuccessfully()) {
retryConfig.setFirstConnectionMadeSuccessfully();
}
retryConfig.setReconnectAttempts(0);
}
private void dispatchOnError(WebSocketConnectionInfo connectionInfo, Throwable throwable) {
countDownForHandshake(webSocketClient);
WebSocketResourceDispatcher.dispatchOnError(connectionInfo, throwable);
}
} | class WebSocketClientHandshakeListener implements ClientHandshakeListener {
private final WebSocketService wsService;
private final WebSocketClientConnectorListener clientConnectorListener;
private final boolean readyOnConnect;
private final ObjectValue webSocketClient;
private CountDownLatch countDownLatch;
public WebSocketClientHandshakeListener(ObjectValue webSocketClient,
WebSocketService wsService,
WebSocketClientConnectorListener clientConnectorListener,
boolean readyOnConnect, CountDownLatch countDownLatch) {
this.webSocketClient = webSocketClient;
this.wsService = wsService;
this.clientConnectorListener = clientConnectorListener;
this.readyOnConnect = readyOnConnect;
this.countDownLatch = countDownLatch;
}
@Override
@Override
public void onError(Throwable throwable, HttpCarbonResponse response) {
if (response != null) {
webSocketClient.set(WebSocketConstants.CLIENT_RESPONSE_FIELD, HttpUtil.createResponseStruct(response));
}
ObjectValue webSocketConnector = BallerinaValues.createObjectValue(WebSocketConstants.PROTOCOL_HTTP_PKG_ID,
WebSocketConstants.WEBSOCKET_CONNECTOR);
WebSocketConnectionInfo connectionInfo = WebSocketUtil.getWebSocketOpenConnectionInfo(null,
webSocketConnector, webSocketClient, wsService);
countDownLatch.countDown();
WebSocketResourceDispatcher.dispatchOnError(connectionInfo, throwable);
}
} |
|
Added support for serializer config. | public Mono<U> getResult(PollingContext<T> pollingContext, TypeReference<U> resultType) {
if (pollingContext.getLatestResponse().getStatus() == LongRunningOperationStatus.FAILED) {
return Mono.error(new RuntimeException("Long running operation failed."));
} else if (pollingContext.getLatestResponse().getStatus() == LongRunningOperationStatus.USER_CANCELLED) {
return Mono.error(new RuntimeException("Long running operation canceled."));
}
String finalGetUrl;
String httpMethod = pollingContext.getData(HTTP_METHOD);
if ("PUT".equalsIgnoreCase(httpMethod) || "PATCH".equalsIgnoreCase(httpMethod)) {
finalGetUrl = pollingContext.getData(REQUEST_URL);
} else if ("POST".equalsIgnoreCase(httpMethod) && pollingContext.getData(LOCATION) != null) {
finalGetUrl = pollingContext.getData(LOCATION);
} else {
throw logger.logExceptionAsError(new RuntimeException("Cannot get final result"));
}
if (finalGetUrl == null) {
String latestResponseBody = pollingContext.getData(POLL_RESPONSE_BODY);
if (TypeUtil.isTypeOrSubTypeOf(BinaryData.class, resultType.getJavaType())) {
return (Mono<U>) Mono.just(BinaryData.fromString(latestResponseBody));
} else {
return Mono.fromCallable(() -> serializer.deserialize(latestResponseBody, resultType.getJavaType(),
SerializerEncoding.JSON));
}
} else {
HttpRequest request = new HttpRequest(HttpMethod.GET, finalGetUrl);
Mono<HttpResponse> responseMono;
if (context == null) {
responseMono = httpPipeline.send(request);
} else {
responseMono = httpPipeline.send(request, context);
}
return responseMono.flatMap(res -> {
if (TypeUtil.isTypeOrSubTypeOf(BinaryData.class, resultType.getJavaType())) {
return (Mono<U>) BinaryData.fromFlux(res.getBody());
} else {
return res.getBodyAsString().flatMap(body -> Mono.fromCallable(() ->
serializer.deserialize(body, resultType.getJavaType(), SerializerEncoding.JSON)));
}
});
}
} | SerializerEncoding.JSON)); | public Mono<U> getResult(PollingContext<T> pollingContext, TypeReference<U> resultType) {
if (pollingContext.getLatestResponse().getStatus() == LongRunningOperationStatus.FAILED) {
return Mono.error(new AzureException("Long running operation failed."));
} else if (pollingContext.getLatestResponse().getStatus() == LongRunningOperationStatus.USER_CANCELLED) {
return Mono.error(new AzureException("Long running operation cancelled."));
}
String finalGetUrl;
String httpMethod = pollingContext.getData(PollingConstants.HTTP_METHOD);
if (HttpMethod.PUT.name().equalsIgnoreCase(httpMethod)
|| HttpMethod.PATCH.name().equalsIgnoreCase(httpMethod)) {
finalGetUrl = pollingContext.getData(PollingConstants.REQUEST_URL);
} else if (HttpMethod.POST.name().equalsIgnoreCase(httpMethod)
&& pollingContext.getData(PollingConstants.LOCATION) != null) {
finalGetUrl = pollingContext.getData(PollingConstants.LOCATION);
} else {
return Mono.error(new AzureException("Cannot get final result"));
}
if (finalGetUrl == null) {
String latestResponseBody = pollingContext.getData(PollingConstants.POLL_RESPONSE_BODY);
return PollingUtils.deserializeResponse(BinaryData.fromString(latestResponseBody), serializer, resultType);
} else {
HttpRequest request = new HttpRequest(HttpMethod.GET, finalGetUrl);
return httpPipeline.send(request)
.flatMap(HttpResponse::getBodyAsByteArray)
.map(BinaryData::fromBytes)
.flatMap(binaryData -> PollingUtils.deserializeResponse(binaryData, serializer, resultType));
}
} | class LocationPollingStrategy<T, U> implements PollingStrategy<T, U> {
private static final String LOCATION = "Location";
private static final String REQUEST_URL = "requestURL";
private static final String HTTP_METHOD = "httpMethod";
private static final String RETRY_AFTER = "Retry-After";
private static final String POLL_RESPONSE_BODY = "pollResponseBody";
private final JacksonAdapter serializer = new JacksonAdapter();
private final ClientLogger logger = new ClientLogger(LocationPollingStrategy.class);
private final HttpPipeline httpPipeline;
private final Context context;
/**
* Creates an instance of the location polling strategy.
*
* @param httpPipeline an instance of {@link HttpPipeline} to send requests with
* @param context additional metadata to pass along with the request
*/
public LocationPollingStrategy(
HttpPipeline httpPipeline,
Context context) {
this.httpPipeline = httpPipeline;
this.context = context;
}
@Override
public Mono<Boolean> canPoll(Response<?> initialResponse) {
HttpHeader locationHeader = initialResponse.getHeaders().get(LOCATION);
return Mono.just(locationHeader != null);
}
@SuppressWarnings("unchecked")
@Override
public Mono<LongRunningOperationStatus> onInitialResponse(Response<?> response, PollingContext<T> pollingContext,
TypeReference<T> pollResponseType) {
HttpHeader locationHeader = response.getHeaders().get(LOCATION);
if (locationHeader != null) {
pollingContext.setData(LOCATION, locationHeader.getValue());
}
pollingContext.setData(HTTP_METHOD, response.getRequest().getHttpMethod().name());
pollingContext.setData(REQUEST_URL, response.getRequest().getUrl().toString());
if (response.getStatusCode() == 200
|| response.getStatusCode() == 201
|| response.getStatusCode() == 202
|| response.getStatusCode() == 204) {
return Mono.just(LongRunningOperationStatus.IN_PROGRESS);
} else {
throw logger.logExceptionAsError(
new RuntimeException("Operation failed or cancelled: " + response.getStatusCode()));
}
}
@SuppressWarnings("unchecked")
@Override
public Mono<PollResponse<T>> poll(PollingContext<T> pollingContext, TypeReference<T> pollResponseType) {
HttpRequest request = new HttpRequest(HttpMethod.GET, pollingContext.getData(LOCATION));
Mono<HttpResponse> responseMono;
if (context == null) {
responseMono = httpPipeline.send(request);
} else {
responseMono = httpPipeline.send(request, context);
}
return responseMono.flatMap(res -> {
HttpHeader locationHeader = res.getHeaders().get(LOCATION);
if (locationHeader != null) {
pollingContext.setData(LOCATION, locationHeader.getValue());
}
LongRunningOperationStatus status;
if (res.getStatusCode() == 202) {
status = LongRunningOperationStatus.IN_PROGRESS;
} else if (res.getStatusCode() >= 200 && res.getStatusCode() <= 204) {
status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
} else {
status = LongRunningOperationStatus.FAILED;
}
return BinaryData.fromFlux(res.getBody()).flatMap(binaryData -> {
pollingContext.setData(POLL_RESPONSE_BODY, binaryData.toString());
if (TypeUtil.isTypeOrSubTypeOf(BinaryData.class, pollResponseType.getJavaType())) {
return (Mono<T>) Mono.just(binaryData);
} else {
return binaryData.toObjectAsync(pollResponseType);
}
}).map(pollResponse -> {
String retryAfter = res.getHeaderValue(RETRY_AFTER);
if (retryAfter != null) {
return new PollResponse<>(status, pollResponse,
Duration.ofSeconds(Long.parseLong(retryAfter)));
} else {
return new PollResponse<>(status, pollResponse);
}
});
});
}
@SuppressWarnings("unchecked")
@Override
@Override
public Mono<T> cancel(PollingContext<T> pollingContext, PollResponse<T> initialResponse) {
return Mono.error(new IllegalStateException("Cancellation is not supported."));
}
} | class LocationPollingStrategy<T, U> implements PollingStrategy<T, U> {
private final HttpPipeline httpPipeline;
private final ObjectSerializer serializer;
/**
* Creates an instance of the location polling strategy using a JSON serializer.
*
* @param httpPipeline an instance of {@link HttpPipeline} to send requests with
*/
public LocationPollingStrategy(HttpPipeline httpPipeline) {
this(httpPipeline, new DefaultJsonSerializer());
}
/**
* Creates an instance of the location polling strategy.
*
* @param httpPipeline an instance of {@link HttpPipeline} to send requests with
* @param serializer a custom serializer for serializing and deserializing polling responses
*/
public LocationPollingStrategy(HttpPipeline httpPipeline, ObjectSerializer serializer) {
this.httpPipeline = Objects.requireNonNull(httpPipeline, "'httpPipeline' cannot be null");
this.serializer = Objects.requireNonNull(serializer, "'serializer' cannot be null");
}
@Override
public Mono<Boolean> canPoll(Response<?> initialResponse) {
HttpHeader locationHeader = initialResponse.getHeaders().get(PollingConstants.LOCATION);
if (locationHeader != null) {
try {
new URL(locationHeader.getValue());
return Mono.just(true);
} catch (MalformedURLException e) {
return Mono.just(false);
}
}
return Mono.just(false);
}
@Override
public Mono<PollResponse<T>> onInitialResponse(Response<?> response, PollingContext<T> pollingContext,
TypeReference<T> pollResponseType) {
HttpHeader locationHeader = response.getHeaders().get(PollingConstants.LOCATION);
if (locationHeader != null) {
pollingContext.setData(PollingConstants.LOCATION, locationHeader.getValue());
}
pollingContext.setData(PollingConstants.HTTP_METHOD, response.getRequest().getHttpMethod().name());
pollingContext.setData(PollingConstants.REQUEST_URL, response.getRequest().getUrl().toString());
if (response.getStatusCode() == 200
|| response.getStatusCode() == 201
|| response.getStatusCode() == 202
|| response.getStatusCode() == 204) {
String retryAfterValue = response.getHeaders().getValue(PollingConstants.RETRY_AFTER);
Duration retryAfter = retryAfterValue == null ? null : Duration.ofSeconds(Long.parseLong(retryAfterValue));
return PollingUtils.convertResponse(response.getValue(), serializer, pollResponseType)
.map(value -> new PollResponse<>(LongRunningOperationStatus.IN_PROGRESS, value, retryAfter))
.switchIfEmpty(Mono.defer(() -> Mono.just(new PollResponse<>(
LongRunningOperationStatus.IN_PROGRESS, null, retryAfter))));
} else {
return Mono.error(new AzureException(String.format("Operation failed or cancelled with status code %d,"
+ ", 'Location' header: %s, and response body: %s", response.getStatusCode(), locationHeader,
PollingUtils.serializeResponse(response.getValue(), serializer))));
}
}
@Override
public Mono<PollResponse<T>> poll(PollingContext<T> pollingContext, TypeReference<T> pollResponseType) {
HttpRequest request = new HttpRequest(HttpMethod.GET, pollingContext.getData(PollingConstants.LOCATION));
return httpPipeline.send(request).flatMap(response -> {
HttpHeader locationHeader = response.getHeaders().get(PollingConstants.LOCATION);
if (locationHeader != null) {
pollingContext.setData(PollingConstants.LOCATION, locationHeader.getValue());
}
LongRunningOperationStatus status;
if (response.getStatusCode() == 202) {
status = LongRunningOperationStatus.IN_PROGRESS;
} else if (response.getStatusCode() >= 200 && response.getStatusCode() <= 204) {
status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
} else {
status = LongRunningOperationStatus.FAILED;
}
return response.getBodyAsByteArray().map(BinaryData::fromBytes).flatMap(binaryData -> {
pollingContext.setData(PollingConstants.POLL_RESPONSE_BODY, binaryData.toString());
String retryAfterValue = response.getHeaders().getValue(PollingConstants.RETRY_AFTER);
Duration retryAfter = retryAfterValue == null ? null
: Duration.ofSeconds(Long.parseLong(retryAfterValue));
return PollingUtils.deserializeResponse(binaryData, serializer, pollResponseType)
.map(value -> new PollResponse<>(status, value, retryAfter));
});
});
}
@Override
} |
I added more UTs to cover the other operators and found no problems. | public OptExpression visitLogicalTableScan(OptExpression optExpression, Void context) {
LogicalScanOperator scanOperator = optExpression.getOp().cast();
Projection projection = scanOperator.getProjection();
if (projection != null) {
projection.getColumnRefMap().values().forEach(s -> requiredOutputColumns.union(s.getUsedColumns()));
}
Set<ColumnRefOperator> outputColumns =
scanOperator.getColRefToColumnMetaMap().keySet().stream().filter(requiredOutputColumns::contains)
.collect(Collectors.toSet());
outputColumns.addAll(Utils.extractColumnRef(scanOperator.getPredicate()));
if (outputColumns.isEmpty()) {
outputColumns.add(
Utils.findSmallestColumnRefFromTable(scanOperator.getColRefToColumnMetaMap(), scanOperator.getTable()));
}
ImmutableMap.Builder<ColumnRefOperator, Column> columnRefColumnMapBuilder = new ImmutableMap.Builder<>();
scanOperator.getColRefToColumnMetaMap().keySet().stream()
.filter(outputColumns::contains)
.forEach(key -> columnRefColumnMapBuilder.put(key, scanOperator.getColRefToColumnMetaMap().get(key)));
ImmutableMap<ColumnRefOperator, Column> newColumnRefMap = columnRefColumnMapBuilder.build();
if (newColumnRefMap.size() != scanOperator.getColRefToColumnMetaMap().size()) {
Operator.Builder builder = OperatorBuilderFactory.build(scanOperator);
builder.withOperator(scanOperator);
LogicalScanOperator.Builder scanBuilder = (LogicalScanOperator.Builder) builder;
scanBuilder.setColRefToColumnMetaMap(newColumnRefMap);
ColumnRefSet outputRefSet = new ColumnRefSet(outputColumns);
outputRefSet.except(requiredOutputColumns);
if (!outputRefSet.isEmpty() && projection == null) {
Map<ColumnRefOperator, ScalarOperator> projectionMap = Maps.newHashMap();
for (ColumnRefOperator columnRefOperator : outputColumns) {
if (outputRefSet.contains(columnRefOperator)) {
continue;
}
projectionMap.put(columnRefOperator, columnRefOperator);
}
Projection newProjection = new Projection(projectionMap);
builder.setProjection(newProjection);
}
Operator newQueryOp = builder.build();
return OptExpression.create(newQueryOp);
} else {
return optExpression;
}
} | } | public OptExpression visitLogicalTableScan(OptExpression optExpression, Void context) {
LogicalScanOperator scanOperator = optExpression.getOp().cast();
Projection projection = scanOperator.getProjection();
if (projection != null) {
projection.getColumnRefMap().values().forEach(s -> requiredOutputColumns.union(s.getUsedColumns()));
}
Set<ColumnRefOperator> outputColumns =
scanOperator.getColRefToColumnMetaMap().keySet().stream().filter(requiredOutputColumns::contains)
.collect(Collectors.toSet());
outputColumns.addAll(Utils.extractColumnRef(scanOperator.getPredicate()));
if (outputColumns.isEmpty()) {
outputColumns.add(
Utils.findSmallestColumnRefFromTable(scanOperator.getColRefToColumnMetaMap(), scanOperator.getTable()));
}
ImmutableMap.Builder<ColumnRefOperator, Column> columnRefColumnMapBuilder = new ImmutableMap.Builder<>();
scanOperator.getColRefToColumnMetaMap().keySet().stream()
.filter(outputColumns::contains)
.forEach(key -> columnRefColumnMapBuilder.put(key, scanOperator.getColRefToColumnMetaMap().get(key)));
ImmutableMap<ColumnRefOperator, Column> newColumnRefMap = columnRefColumnMapBuilder.build();
if (newColumnRefMap.size() != scanOperator.getColRefToColumnMetaMap().size()) {
Operator.Builder builder = OperatorBuilderFactory.build(scanOperator);
builder.withOperator(scanOperator);
LogicalScanOperator.Builder scanBuilder = (LogicalScanOperator.Builder) builder;
scanBuilder.setColRefToColumnMetaMap(newColumnRefMap);
ColumnRefSet outputRefSet = new ColumnRefSet(outputColumns);
outputRefSet.except(requiredOutputColumns);
if (!outputRefSet.isEmpty() && projection == null) {
Map<ColumnRefOperator, ScalarOperator> projectionMap = Maps.newHashMap();
for (ColumnRefOperator columnRefOperator : outputColumns) {
if (outputRefSet.contains(columnRefOperator)) {
continue;
}
projectionMap.put(columnRefOperator, columnRefOperator);
}
Projection newProjection = new Projection(projectionMap);
builder.setProjection(newProjection);
}
Operator newQueryOp = builder.build();
return OptExpression.create(newQueryOp);
} else {
return optExpression;
}
} | class ColumnPruneVisitor extends OptExpressionVisitor<OptExpression, Void> {
public OptExpression visitLogicalAggregate(OptExpression optExpression, Void context) {
LogicalAggregationOperator aggregationOperator = (LogicalAggregationOperator) optExpression.getOp();
if (aggregationOperator.getProjection() != null) {
Projection projection = aggregationOperator.getProjection();
projection.getColumnRefMap().values().forEach(s -> requiredOutputColumns.union(s.getUsedColumns()));
}
if (aggregationOperator.getPredicate() != null) {
requiredOutputColumns.union(Utils.extractColumnRef(aggregationOperator.getPredicate()));
}
requiredOutputColumns.union(aggregationOperator.getGroupingKeys());
for (Map.Entry<ColumnRefOperator, CallOperator> entry : aggregationOperator.getAggregations().entrySet()) {
requiredOutputColumns.union(entry.getKey());
requiredOutputColumns.union(Utils.extractColumnRef(entry.getValue()));
}
List<OptExpression> children = visitChildren(optExpression);
return OptExpression.create(aggregationOperator, children);
}
public OptExpression visitLogicalUnion(OptExpression optExpression, Void context) {
for (int childIdx = 0; childIdx < optExpression.arity(); ++childIdx) {
requiredOutputColumns.union(optExpression.getChildOutputColumns(childIdx));
}
List<OptExpression> children = visitChildren(optExpression);
return OptExpression.create(optExpression.getOp(), children);
}
public OptExpression visitLogicalFilter(OptExpression optExpression, Void context) {
LogicalFilterOperator filterOperator = (LogicalFilterOperator) optExpression.getOp();
ColumnRefSet requiredInputColumns = filterOperator.getRequiredChildInputColumns();
requiredOutputColumns.union(requiredInputColumns);
List<OptExpression> children = visitChildren(optExpression);
return OptExpression.create(filterOperator, children);
}
public OptExpression visitLogicalJoin(OptExpression optExpression, Void context) {
LogicalJoinOperator joinOperator = (LogicalJoinOperator) optExpression.getOp();
requiredOutputColumns.union(joinOperator.getRequiredChildInputColumns());
List<OptExpression> children = visitChildren(optExpression);
return OptExpression.create(joinOperator, children);
}
public OptExpression visit(OptExpression optExpression, Void context) {
throw UnsupportedException.unsupportedException(String.format("ColumnPruner does not support:%s", optExpression));
}
private List<OptExpression> visitChildren(OptExpression optExpression) {
List<OptExpression> children = Lists.newArrayList();
for (OptExpression child : optExpression.getInputs()) {
children.add(child.getOp().accept(this, child, null));
}
return children;
}
} | class ColumnPruneVisitor extends OptExpressionVisitor<OptExpression, Void> {
public OptExpression visitLogicalAggregate(OptExpression optExpression, Void context) {
LogicalAggregationOperator aggregationOperator = (LogicalAggregationOperator) optExpression.getOp();
if (aggregationOperator.getProjection() != null) {
Projection projection = aggregationOperator.getProjection();
projection.getColumnRefMap().values().forEach(s -> requiredOutputColumns.union(s.getUsedColumns()));
}
if (aggregationOperator.getPredicate() != null) {
requiredOutputColumns.union(Utils.extractColumnRef(aggregationOperator.getPredicate()));
}
requiredOutputColumns.union(aggregationOperator.getGroupingKeys());
for (Map.Entry<ColumnRefOperator, CallOperator> entry : aggregationOperator.getAggregations().entrySet()) {
requiredOutputColumns.union(entry.getKey());
requiredOutputColumns.union(Utils.extractColumnRef(entry.getValue()));
}
List<OptExpression> children = visitChildren(optExpression);
return OptExpression.create(aggregationOperator, children);
}
public OptExpression visitLogicalUnion(OptExpression optExpression, Void context) {
for (int childIdx = 0; childIdx < optExpression.arity(); ++childIdx) {
requiredOutputColumns.union(optExpression.getChildOutputColumns(childIdx));
}
List<OptExpression> children = visitChildren(optExpression);
return OptExpression.create(optExpression.getOp(), children);
}
public OptExpression visitLogicalFilter(OptExpression optExpression, Void context) {
LogicalFilterOperator filterOperator = (LogicalFilterOperator) optExpression.getOp();
ColumnRefSet requiredInputColumns = filterOperator.getRequiredChildInputColumns();
requiredOutputColumns.union(requiredInputColumns);
List<OptExpression> children = visitChildren(optExpression);
return OptExpression.create(filterOperator, children);
}
public OptExpression visitLogicalJoin(OptExpression optExpression, Void context) {
LogicalJoinOperator joinOperator = (LogicalJoinOperator) optExpression.getOp();
requiredOutputColumns.union(joinOperator.getRequiredChildInputColumns());
List<OptExpression> children = visitChildren(optExpression);
return OptExpression.create(joinOperator, children);
}
public OptExpression visit(OptExpression optExpression, Void context) {
throw UnsupportedException.unsupportedException(String.format("ColumnPruner does not support:%s", optExpression));
}
private List<OptExpression> visitChildren(OptExpression optExpression) {
List<OptExpression> children = Lists.newArrayList();
for (OptExpression child : optExpression.getInputs()) {
children.add(child.getOp().accept(this, child, null));
}
return children;
}
} |
:clap: remove repeated and useless code :clap: | public void validate(PipelineOptions pipelineOptions) {
checkState(
hosts() != null,
"CassandraIO."
+ getMutationTypeName()
+ "() requires a list of hosts to be set via withHosts(hosts)");
checkState(
port() != null,
"CassandraIO."
+ getMutationTypeName()
+ "() requires a "
+ "valid port number to be set via withPort(port)");
checkState(
keyspace() != null,
"CassandraIO."
+ getMutationTypeName()
+ "() requires a keyspace to be set via "
+ "withKeyspace(keyspace)");
checkState(
entity() != null,
"CassandraIO."
+ getMutationTypeName()
+ "() requires an entity to be set via "
+ "withEntity(entity)");
} | "CassandraIO." | public void validate(PipelineOptions pipelineOptions) {
checkState(
hosts() != null,
"CassandraIO."
+ getMutationTypeName()
+ "() requires a list of hosts to be set via withHosts(hosts)");
checkState(
port() != null,
"CassandraIO."
+ getMutationTypeName()
+ "() requires a "
+ "valid port number to be set via withPort(port)");
checkState(
keyspace() != null,
"CassandraIO."
+ getMutationTypeName()
+ "() requires a keyspace to be set via "
+ "withKeyspace(keyspace)");
checkState(
entity() != null,
"CassandraIO."
+ getMutationTypeName()
+ "() requires an entity to be set via "
+ "withEntity(entity)");
} | class in the input {@link PCollection} | class in the input {@link PCollection} |
This feature will be done in the 'Support alter materialized view properties' change or a later PR. | public static int analyzePartitionRefreshNumber(Map<String, String> properties) throws AnalysisException {
int partitionRefreshNumber = -1;
if (properties != null && properties.containsKey(PROPERTIES_PARTITION_REFRESH_NUMBER)) {
try {
partitionRefreshNumber = Integer.parseInt(properties.get(PROPERTIES_PARTITION_REFRESH_NUMBER));
} catch (NumberFormatException e) {
throw new AnalysisException("Partition Refresh Number: " + e.getMessage());
}
if (partitionRefreshNumber <= 0) {
throw new AnalysisException("Partition Refresh Number should larger than 0.");
}
properties.remove(PROPERTIES_PARTITION_REFRESH_NUMBER);
}
return partitionRefreshNumber;
} | throw new AnalysisException("Partition Refresh Number should larger than 0."); | public static int analyzePartitionRefreshNumber(Map<String, String> properties) throws AnalysisException {
int partitionRefreshNumber = -1;
if (properties != null && properties.containsKey(PROPERTIES_PARTITION_REFRESH_NUMBER)) {
try {
partitionRefreshNumber = Integer.parseInt(properties.get(PROPERTIES_PARTITION_REFRESH_NUMBER));
} catch (NumberFormatException e) {
throw new AnalysisException("Partition Refresh Number: " + e.getMessage());
}
if (partitionRefreshNumber <= 0) {
throw new AnalysisException("Partition Refresh Number should larger than 0.");
}
properties.remove(PROPERTIES_PARTITION_REFRESH_NUMBER);
}
return partitionRefreshNumber;
} | class PropertyAnalyzer {
private static final Logger LOG = LogManager.getLogger(PropertyAnalyzer.class);
private static final String COMMA_SEPARATOR = ",";
public static final String PROPERTIES_SHORT_KEY = "short_key";
public static final String PROPERTIES_REPLICATION_NUM = "replication_num";
public static final String PROPERTIES_STORAGE_TYPE = "storage_type";
public static final String PROPERTIES_STORAGE_MEDIUM = "storage_medium";
public static final String PROPERTIES_STORAGE_COLDOWN_TIME = "storage_cooldown_time";
public static final String PROPERTIES_VERSION_INFO = "version_info";
public static final String PROPERTIES_SCHEMA_VERSION = "schema_version";
public static final String PROPERTIES_BF_COLUMNS = "bloom_filter_columns";
public static final String PROPERTIES_BF_FPP = "bloom_filter_fpp";
private static final double MAX_FPP = 0.05;
private static final double MIN_FPP = 0.0001;
public static final String PROPERTIES_COLUMN_SEPARATOR = "column_separator";
public static final String PROPERTIES_LINE_DELIMITER = "line_delimiter";
public static final String PROPERTIES_COLOCATE_WITH = "colocate_with";
public static final String PROPERTIES_TIMEOUT = "timeout";
public static final String PROPERTIES_DISTRIBUTION_TYPE = "distribution_type";
public static final String PROPERTIES_SEND_CLEAR_ALTER_TASK = "send_clear_alter_tasks";
public static final String PROPERTIES_COMPRESSION = "compression";
public static final String PROPERTIES_COLOCATE_MV = "colocate_mv";
/*
* for upgrade alpha rowset to beta rowset, valid value: v1, v2
* v1: alpha rowset
* v2: beta rowset
*/
public static final String PROPERTIES_STORAGE_FORMAT = "storage_format";
public static final String PROPERTIES_INMEMORY = "in_memory";
public static final String PROPERTIES_ENABLE_PERSISTENT_INDEX = "enable_persistent_index";
public static final String PROPERTIES_WRITE_QUORUM = "write_quorum";
public static final String PROPERTIES_REPLICATED_STORAGE = "replicated_storage";
public static final String PROPERTIES_TABLET_TYPE = "tablet_type";
public static final String PROPERTIES_STRICT_RANGE = "strict_range";
public static final String PROPERTIES_USE_TEMP_PARTITION_NAME = "use_temp_partition_name";
public static final String PROPERTIES_TYPE = "type";
public static final String ENABLE_LOW_CARD_DICT_TYPE = "enable_low_card_dict";
public static final String ABLE_LOW_CARD_DICT = "1";
public static final String DISABLE_LOW_CARD_DICT = "0";
public static final String PROPERTIES_ENABLE_STORAGE_CACHE = "enable_storage_cache";
public static final String PROPERTIES_STORAGE_CACHE_TTL = "storage_cache_ttl";
public static final String PROPERTIES_ALLOW_ASYNC_WRITE_BACK = "allow_async_write_back";
public static final String PROPERTIES_PARTITION_TTL_NUMBER = "partition_ttl_number";
public static final String PROPERTIES_PARTITION_REFRESH_NUMBER = "partition_refresh_number";
public static DataProperty analyzeDataProperty(Map<String, String> properties, DataProperty oldDataProperty)
throws AnalysisException {
if (properties == null) {
return oldDataProperty;
}
TStorageMedium storageMedium = null;
long coolDownTimeStamp = DataProperty.MAX_COOLDOWN_TIME_MS;
boolean hasMedium = false;
boolean hasCooldown = false;
for (Map.Entry<String, String> entry : properties.entrySet()) {
String key = entry.getKey();
String value = entry.getValue();
if (!hasMedium && key.equalsIgnoreCase(PROPERTIES_STORAGE_MEDIUM)) {
hasMedium = true;
if (value.equalsIgnoreCase(TStorageMedium.SSD.name())) {
storageMedium = TStorageMedium.SSD;
} else if (value.equalsIgnoreCase(TStorageMedium.HDD.name())) {
storageMedium = TStorageMedium.HDD;
} else {
throw new AnalysisException("Invalid storage medium: " + value);
}
} else if (!hasCooldown && key.equalsIgnoreCase(PROPERTIES_STORAGE_COLDOWN_TIME)) {
hasCooldown = true;
DateLiteral dateLiteral = new DateLiteral(value, Type.DATETIME);
coolDownTimeStamp = dateLiteral.unixTimestamp(TimeUtils.getTimeZone());
}
}
if (!hasCooldown && !hasMedium) {
return oldDataProperty;
}
properties.remove(PROPERTIES_STORAGE_MEDIUM);
properties.remove(PROPERTIES_STORAGE_COLDOWN_TIME);
if (hasCooldown && !hasMedium) {
throw new AnalysisException("Invalid data property. storage medium property is not found");
}
if (storageMedium == TStorageMedium.HDD && hasCooldown) {
throw new AnalysisException("Can not assign cooldown timestamp to HDD storage medium");
}
long currentTimeMs = System.currentTimeMillis();
if (storageMedium == TStorageMedium.SSD && hasCooldown) {
if (coolDownTimeStamp <= currentTimeMs) {
throw new AnalysisException("Cooldown time should later than now");
}
}
if (storageMedium == TStorageMedium.SSD && !hasCooldown) {
coolDownTimeStamp = ((Config.tablet_sched_storage_cooldown_second <= 0) ||
((DataProperty.MAX_COOLDOWN_TIME_MS - currentTimeMs) / 1000L <
Config.tablet_sched_storage_cooldown_second)) ?
DataProperty.MAX_COOLDOWN_TIME_MS :
currentTimeMs + Config.tablet_sched_storage_cooldown_second * 1000L;
}
Preconditions.checkNotNull(storageMedium);
return new DataProperty(storageMedium, coolDownTimeStamp);
}
public static short analyzeShortKeyColumnCount(Map<String, String> properties) throws AnalysisException {
short shortKeyColumnCount = (short) -1;
if (properties != null && properties.containsKey(PROPERTIES_SHORT_KEY)) {
try {
shortKeyColumnCount = Short.parseShort(properties.get(PROPERTIES_SHORT_KEY));
} catch (NumberFormatException e) {
throw new AnalysisException("Short key: " + e.getMessage());
}
if (shortKeyColumnCount <= 0) {
throw new AnalysisException("Short key column count should larger than 0.");
}
properties.remove(PROPERTIES_SHORT_KEY);
}
return shortKeyColumnCount;
}
public static int analyzePartitionTimeToLive(Map<String, String> properties) throws AnalysisException {
int partitionTimeToLive = -1;
if (properties != null && properties.containsKey(PROPERTIES_PARTITION_TTL_NUMBER)) {
try {
partitionTimeToLive = Integer.parseInt(properties.get(PROPERTIES_PARTITION_TTL_NUMBER));
} catch (NumberFormatException e) {
throw new AnalysisException("Partition TTL Number: " + e.getMessage());
}
if (partitionTimeToLive <= 0) {
throw new AnalysisException("Partition TTL Number should larger than 0.");
}
properties.remove(PROPERTIES_PARTITION_TTL_NUMBER);
}
return partitionTimeToLive;
}
public static Short analyzeReplicationNum(Map<String, String> properties, short oldReplicationNum)
throws AnalysisException {
short replicationNum = oldReplicationNum;
if (properties != null && properties.containsKey(PROPERTIES_REPLICATION_NUM)) {
try {
replicationNum = Short.parseShort(properties.get(PROPERTIES_REPLICATION_NUM));
} catch (Exception e) {
throw new AnalysisException(e.getMessage());
}
checkAvailableBackendsIsEnough(replicationNum);
properties.remove(PROPERTIES_REPLICATION_NUM);
}
return replicationNum;
}
public static Short analyzeReplicationNum(Map<String, String> properties, boolean isDefault)
throws AnalysisException {
String key = "default.";
if (isDefault) {
key += PropertyAnalyzer.PROPERTIES_REPLICATION_NUM;
} else {
key = PropertyAnalyzer.PROPERTIES_REPLICATION_NUM;
}
short replicationNum = Short.parseShort(properties.get(key));
checkAvailableBackendsIsEnough(replicationNum);
return replicationNum;
}
private static void checkAvailableBackendsIsEnough(short replicationNum) throws AnalysisException {
if (replicationNum <= 0) {
throw new AnalysisException("Replication num should larger than 0. (suggested 3)");
}
List<Long> backendIds = GlobalStateMgr.getCurrentSystemInfo().getAvailableBackendIds();
if (replicationNum > backendIds.size()) {
throw new AnalysisException("Replication num should be less than the number of available BE nodes. "
+ "Replication num is " + replicationNum + " available BE nodes is " + backendIds.size());
}
}
public static String analyzeColumnSeparator(Map<String, String> properties, String oldColumnSeparator) {
String columnSeparator = oldColumnSeparator;
if (properties != null && properties.containsKey(PROPERTIES_COLUMN_SEPARATOR)) {
columnSeparator = properties.get(PROPERTIES_COLUMN_SEPARATOR);
properties.remove(PROPERTIES_COLUMN_SEPARATOR);
}
return columnSeparator;
}
public static String analyzeRowDelimiter(Map<String, String> properties, String oldRowDelimiter) {
String rowDelimiter = oldRowDelimiter;
if (properties != null && properties.containsKey(PROPERTIES_LINE_DELIMITER)) {
rowDelimiter = properties.get(PROPERTIES_LINE_DELIMITER);
properties.remove(PROPERTIES_LINE_DELIMITER);
}
return rowDelimiter;
}
public static TStorageType analyzeStorageType(Map<String, String> properties) throws AnalysisException {
TStorageType tStorageType = TStorageType.COLUMN;
if (properties != null && properties.containsKey(PROPERTIES_STORAGE_TYPE)) {
String storageType = properties.get(PROPERTIES_STORAGE_TYPE);
if (storageType.equalsIgnoreCase(TStorageType.COLUMN.name())) {
tStorageType = TStorageType.COLUMN;
} else {
throw new AnalysisException("Invalid storage type: " + storageType);
}
properties.remove(PROPERTIES_STORAGE_TYPE);
}
return tStorageType;
}
public static TTabletType analyzeTabletType(Map<String, String> properties) throws AnalysisException {
TTabletType tTabletType = TTabletType.TABLET_TYPE_DISK;
if (properties != null && properties.containsKey(PROPERTIES_TABLET_TYPE)) {
String tabletType = properties.get(PROPERTIES_TABLET_TYPE);
if (tabletType.equalsIgnoreCase("memory")) {
tTabletType = TTabletType.TABLET_TYPE_MEMORY;
} else if (tabletType.equalsIgnoreCase("disk")) {
tTabletType = TTabletType.TABLET_TYPE_DISK;
} else {
throw new AnalysisException(("Invalid tablet type"));
}
properties.remove(PROPERTIES_TABLET_TYPE);
}
return tTabletType;
}
public static Long analyzeVersionInfo(Map<String, String> properties) throws AnalysisException {
long versionInfo = Partition.PARTITION_INIT_VERSION;
if (properties != null && properties.containsKey(PROPERTIES_VERSION_INFO)) {
String versionInfoStr = properties.get(PROPERTIES_VERSION_INFO);
try {
versionInfo = Long.parseLong(versionInfoStr);
} catch (NumberFormatException e) {
throw new AnalysisException("version info format error.");
}
properties.remove(PROPERTIES_VERSION_INFO);
}
return versionInfo;
}
public static int analyzeSchemaVersion(Map<String, String> properties) throws AnalysisException {
int schemaVersion = 0;
if (properties != null && properties.containsKey(PROPERTIES_SCHEMA_VERSION)) {
String schemaVersionStr = properties.get(PROPERTIES_SCHEMA_VERSION);
try {
schemaVersion = Integer.parseInt(schemaVersionStr);
} catch (Exception e) {
throw new AnalysisException("schema version format error");
}
properties.remove(PROPERTIES_SCHEMA_VERSION);
}
return schemaVersion;
}
public static Set<String> analyzeBloomFilterColumns(Map<String, String> properties, List<Column> columns,
boolean isPrimaryKey) throws AnalysisException {
Set<String> bfColumns = null;
if (properties != null && properties.containsKey(PROPERTIES_BF_COLUMNS)) {
bfColumns = Sets.newHashSet();
String bfColumnsStr = properties.get(PROPERTIES_BF_COLUMNS);
if (Strings.isNullOrEmpty(bfColumnsStr)) {
return bfColumns;
}
String[] bfColumnArr = bfColumnsStr.split(COMMA_SEPARATOR);
Set<String> bfColumnSet = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);
for (String bfColumn : bfColumnArr) {
bfColumn = bfColumn.trim();
String finalBfColumn = bfColumn;
Column column = columns.stream().filter(col -> col.getName().equalsIgnoreCase(finalBfColumn))
.findFirst()
.orElse(null);
if (column == null) {
throw new AnalysisException(
String.format("Invalid bloom filter column '%s': not exists", bfColumn));
}
Type type = column.getType();
if (!type.supportBloomFilter()) {
throw new AnalysisException(String.format("Invalid bloom filter column '%s': unsupported type %s",
bfColumn, type));
}
if (!(column.isKey() || isPrimaryKey || column.getAggregationType() == AggregateType.NONE)) {
throw new AnalysisException("Bloom filter index only used in columns of DUP_KEYS/PRIMARY table or "
+ "key columns of UNIQUE_KEYS/AGG_KEYS table. invalid column: " + bfColumn);
}
if (bfColumnSet.contains(bfColumn)) {
throw new AnalysisException(String.format("Duplicate bloom filter column '%s'", bfColumn));
}
bfColumnSet.add(bfColumn);
bfColumns.add(column.getName());
}
properties.remove(PROPERTIES_BF_COLUMNS);
}
return bfColumns;
}
public static double analyzeBloomFilterFpp(Map<String, String> properties) throws AnalysisException {
double bfFpp = 0;
if (properties != null && properties.containsKey(PROPERTIES_BF_FPP)) {
String bfFppStr = properties.get(PROPERTIES_BF_FPP);
try {
bfFpp = Double.parseDouble(bfFppStr);
} catch (NumberFormatException e) {
throw new AnalysisException("Bloom filter fpp is not Double");
}
if (bfFpp < MIN_FPP || bfFpp > MAX_FPP) {
throw new AnalysisException("Bloom filter fpp should in [" + MIN_FPP + ", " + MAX_FPP + "]");
}
properties.remove(PROPERTIES_BF_FPP);
}
return bfFpp;
}
public static String analyzeColocate(Map<String, String> properties) throws AnalysisException {
String colocateGroup = null;
if (properties != null && properties.containsKey(PROPERTIES_COLOCATE_WITH)) {
colocateGroup = properties.get(PROPERTIES_COLOCATE_WITH);
properties.remove(PROPERTIES_COLOCATE_WITH);
}
return colocateGroup;
}
public static long analyzeTimeout(Map<String, String> properties, long defaultTimeout) throws AnalysisException {
long timeout = defaultTimeout;
if (properties != null && properties.containsKey(PROPERTIES_TIMEOUT)) {
String timeoutStr = properties.get(PROPERTIES_TIMEOUT);
try {
timeout = Long.parseLong(timeoutStr);
} catch (NumberFormatException e) {
throw new AnalysisException("Invalid timeout format: " + timeoutStr);
}
properties.remove(PROPERTIES_TIMEOUT);
}
return timeout;
}
public static TStorageFormat analyzeStorageFormat(Map<String, String> properties) throws AnalysisException {
String storageFormat;
if (properties != null && properties.containsKey(PROPERTIES_STORAGE_FORMAT)) {
storageFormat = properties.get(PROPERTIES_STORAGE_FORMAT);
properties.remove(PROPERTIES_STORAGE_FORMAT);
} else {
return TStorageFormat.DEFAULT;
}
if (storageFormat.equalsIgnoreCase("v1")) {
return TStorageFormat.V1;
} else if (storageFormat.equalsIgnoreCase("v2")) {
return TStorageFormat.V2;
} else if (storageFormat.equalsIgnoreCase("default")) {
return TStorageFormat.DEFAULT;
} else {
throw new AnalysisException("unknown storage format: " + storageFormat);
}
}
public static TCompressionType analyzeCompressionType(Map<String, String> properties) throws AnalysisException {
String compressionType;
if (properties == null || !properties.containsKey(PROPERTIES_COMPRESSION)) {
return TCompressionType.LZ4_FRAME;
}
compressionType = properties.get(PROPERTIES_COMPRESSION);
properties.remove(PROPERTIES_COMPRESSION);
if (CompressionUtils.getCompressTypeByName(compressionType) != null) {
return CompressionUtils.getCompressTypeByName(compressionType);
} else {
throw new AnalysisException("unknown compression type: " + compressionType);
}
}
public static String analyzeWriteQuorum(Map<String, String> properties) throws AnalysisException {
String writeQuorum;
if (properties == null || !properties.containsKey(PROPERTIES_WRITE_QUORUM)) {
return WriteQuorum.MAJORITY;
}
writeQuorum = properties.get(PROPERTIES_WRITE_QUORUM);
properties.remove(PROPERTIES_WRITE_QUORUM);
if (WriteQuorum.findTWriteQuorumByName(writeQuorum) != null) {
return writeQuorum;
} else {
throw new AnalysisException("unknown write quorum: " + writeQuorum);
}
}
public static boolean analyzeBooleanProp(Map<String, String> properties, String propKey, boolean defaultVal) {
if (properties != null && properties.containsKey(propKey)) {
String val = properties.get(propKey);
properties.remove(propKey);
return Boolean.parseBoolean(val);
}
return defaultVal;
}
public static String analyzeType(Map<String, String> properties) {
String type = null;
if (properties != null && properties.containsKey(PROPERTIES_TYPE)) {
type = properties.get(PROPERTIES_TYPE);
properties.remove(PROPERTIES_TYPE);
}
return type;
}
public static long analyzeLongProp(Map<String, String> properties, String propKey, long defaultVal)
throws AnalysisException {
long val = defaultVal;
if (properties != null && properties.containsKey(propKey)) {
String valStr = properties.get(propKey);
try {
val = Long.parseLong(valStr);
} catch (NumberFormatException e) {
throw new AnalysisException("Invalid " + propKey + " format: " + valStr);
}
properties.remove(propKey);
}
return val;
}
} | class PropertyAnalyzer {
private static final Logger LOG = LogManager.getLogger(PropertyAnalyzer.class);
private static final String COMMA_SEPARATOR = ",";
public static final String PROPERTIES_SHORT_KEY = "short_key";
public static final String PROPERTIES_REPLICATION_NUM = "replication_num";
public static final String PROPERTIES_STORAGE_TYPE = "storage_type";
public static final String PROPERTIES_STORAGE_MEDIUM = "storage_medium";
public static final String PROPERTIES_STORAGE_COLDOWN_TIME = "storage_cooldown_time";
public static final String PROPERTIES_VERSION_INFO = "version_info";
public static final String PROPERTIES_SCHEMA_VERSION = "schema_version";
public static final String PROPERTIES_BF_COLUMNS = "bloom_filter_columns";
public static final String PROPERTIES_BF_FPP = "bloom_filter_fpp";
private static final double MAX_FPP = 0.05;
private static final double MIN_FPP = 0.0001;
public static final String PROPERTIES_COLUMN_SEPARATOR = "column_separator";
public static final String PROPERTIES_LINE_DELIMITER = "line_delimiter";
public static final String PROPERTIES_COLOCATE_WITH = "colocate_with";
public static final String PROPERTIES_TIMEOUT = "timeout";
public static final String PROPERTIES_DISTRIBUTION_TYPE = "distribution_type";
public static final String PROPERTIES_SEND_CLEAR_ALTER_TASK = "send_clear_alter_tasks";
public static final String PROPERTIES_COMPRESSION = "compression";
public static final String PROPERTIES_COLOCATE_MV = "colocate_mv";
/*
* for upgrade alpha rowset to beta rowset, valid value: v1, v2
* v1: alpha rowset
* v2: beta rowset
*/
public static final String PROPERTIES_STORAGE_FORMAT = "storage_format";
public static final String PROPERTIES_INMEMORY = "in_memory";
public static final String PROPERTIES_ENABLE_PERSISTENT_INDEX = "enable_persistent_index";
public static final String PROPERTIES_WRITE_QUORUM = "write_quorum";
public static final String PROPERTIES_REPLICATED_STORAGE = "replicated_storage";
public static final String PROPERTIES_TABLET_TYPE = "tablet_type";
public static final String PROPERTIES_STRICT_RANGE = "strict_range";
public static final String PROPERTIES_USE_TEMP_PARTITION_NAME = "use_temp_partition_name";
public static final String PROPERTIES_TYPE = "type";
public static final String ENABLE_LOW_CARD_DICT_TYPE = "enable_low_card_dict";
public static final String ABLE_LOW_CARD_DICT = "1";
public static final String DISABLE_LOW_CARD_DICT = "0";
public static final String PROPERTIES_ENABLE_STORAGE_CACHE = "enable_storage_cache";
public static final String PROPERTIES_STORAGE_CACHE_TTL = "storage_cache_ttl";
public static final String PROPERTIES_ALLOW_ASYNC_WRITE_BACK = "allow_async_write_back";
public static final String PROPERTIES_PARTITION_TTL_NUMBER = "partition_ttl_number";
public static final String PROPERTIES_PARTITION_REFRESH_NUMBER = "partition_refresh_number";
public static DataProperty analyzeDataProperty(Map<String, String> properties, DataProperty oldDataProperty)
throws AnalysisException {
if (properties == null) {
return oldDataProperty;
}
TStorageMedium storageMedium = null;
long coolDownTimeStamp = DataProperty.MAX_COOLDOWN_TIME_MS;
boolean hasMedium = false;
boolean hasCooldown = false;
for (Map.Entry<String, String> entry : properties.entrySet()) {
String key = entry.getKey();
String value = entry.getValue();
if (!hasMedium && key.equalsIgnoreCase(PROPERTIES_STORAGE_MEDIUM)) {
hasMedium = true;
if (value.equalsIgnoreCase(TStorageMedium.SSD.name())) {
storageMedium = TStorageMedium.SSD;
} else if (value.equalsIgnoreCase(TStorageMedium.HDD.name())) {
storageMedium = TStorageMedium.HDD;
} else {
throw new AnalysisException("Invalid storage medium: " + value);
}
} else if (!hasCooldown && key.equalsIgnoreCase(PROPERTIES_STORAGE_COLDOWN_TIME)) {
hasCooldown = true;
DateLiteral dateLiteral = new DateLiteral(value, Type.DATETIME);
coolDownTimeStamp = dateLiteral.unixTimestamp(TimeUtils.getTimeZone());
}
}
if (!hasCooldown && !hasMedium) {
return oldDataProperty;
}
properties.remove(PROPERTIES_STORAGE_MEDIUM);
properties.remove(PROPERTIES_STORAGE_COLDOWN_TIME);
if (hasCooldown && !hasMedium) {
throw new AnalysisException("Invalid data property. storage medium property is not found");
}
if (storageMedium == TStorageMedium.HDD && hasCooldown) {
throw new AnalysisException("Can not assign cooldown timestamp to HDD storage medium");
}
long currentTimeMs = System.currentTimeMillis();
if (storageMedium == TStorageMedium.SSD && hasCooldown) {
if (coolDownTimeStamp <= currentTimeMs) {
throw new AnalysisException("Cooldown time should later than now");
}
}
if (storageMedium == TStorageMedium.SSD && !hasCooldown) {
coolDownTimeStamp = ((Config.tablet_sched_storage_cooldown_second <= 0) ||
((DataProperty.MAX_COOLDOWN_TIME_MS - currentTimeMs) / 1000L <
Config.tablet_sched_storage_cooldown_second)) ?
DataProperty.MAX_COOLDOWN_TIME_MS :
currentTimeMs + Config.tablet_sched_storage_cooldown_second * 1000L;
}
Preconditions.checkNotNull(storageMedium);
return new DataProperty(storageMedium, coolDownTimeStamp);
}
public static short analyzeShortKeyColumnCount(Map<String, String> properties) throws AnalysisException {
short shortKeyColumnCount = (short) -1;
if (properties != null && properties.containsKey(PROPERTIES_SHORT_KEY)) {
try {
shortKeyColumnCount = Short.parseShort(properties.get(PROPERTIES_SHORT_KEY));
} catch (NumberFormatException e) {
throw new AnalysisException("Short key: " + e.getMessage());
}
if (shortKeyColumnCount <= 0) {
throw new AnalysisException("Short key column count should larger than 0.");
}
properties.remove(PROPERTIES_SHORT_KEY);
}
return shortKeyColumnCount;
}
public static int analyzePartitionTimeToLive(Map<String, String> properties) throws AnalysisException {
int partitionTimeToLive = INVALID;
if (properties != null && properties.containsKey(PROPERTIES_PARTITION_TTL_NUMBER)) {
try {
partitionTimeToLive = Integer.parseInt(properties.get(PROPERTIES_PARTITION_TTL_NUMBER));
} catch (NumberFormatException e) {
throw new AnalysisException("Partition TTL Number: " + e.getMessage());
}
if (partitionTimeToLive <= 0) {
partitionTimeToLive = INVALID;
}
properties.remove(PROPERTIES_PARTITION_TTL_NUMBER);
}
return partitionTimeToLive;
}
public static Short analyzeReplicationNum(Map<String, String> properties, short oldReplicationNum)
throws AnalysisException {
short replicationNum = oldReplicationNum;
if (properties != null && properties.containsKey(PROPERTIES_REPLICATION_NUM)) {
try {
replicationNum = Short.parseShort(properties.get(PROPERTIES_REPLICATION_NUM));
} catch (Exception e) {
throw new AnalysisException(e.getMessage());
}
checkAvailableBackendsIsEnough(replicationNum);
properties.remove(PROPERTIES_REPLICATION_NUM);
}
return replicationNum;
}
public static Short analyzeReplicationNum(Map<String, String> properties, boolean isDefault)
throws AnalysisException {
String key = "default.";
if (isDefault) {
key += PropertyAnalyzer.PROPERTIES_REPLICATION_NUM;
} else {
key = PropertyAnalyzer.PROPERTIES_REPLICATION_NUM;
}
short replicationNum = Short.parseShort(properties.get(key));
checkAvailableBackendsIsEnough(replicationNum);
return replicationNum;
}
private static void checkAvailableBackendsIsEnough(short replicationNum) throws AnalysisException {
if (replicationNum <= 0) {
throw new AnalysisException("Replication num should larger than 0. (suggested 3)");
}
List<Long> backendIds = GlobalStateMgr.getCurrentSystemInfo().getAvailableBackendIds();
if (replicationNum > backendIds.size()) {
throw new AnalysisException("Replication num should be less than the number of available BE nodes. "
+ "Replication num is " + replicationNum + " available BE nodes is " + backendIds.size());
}
}
public static String analyzeColumnSeparator(Map<String, String> properties, String oldColumnSeparator) {
String columnSeparator = oldColumnSeparator;
if (properties != null && properties.containsKey(PROPERTIES_COLUMN_SEPARATOR)) {
columnSeparator = properties.get(PROPERTIES_COLUMN_SEPARATOR);
properties.remove(PROPERTIES_COLUMN_SEPARATOR);
}
return columnSeparator;
}
public static String analyzeRowDelimiter(Map<String, String> properties, String oldRowDelimiter) {
String rowDelimiter = oldRowDelimiter;
if (properties != null && properties.containsKey(PROPERTIES_LINE_DELIMITER)) {
rowDelimiter = properties.get(PROPERTIES_LINE_DELIMITER);
properties.remove(PROPERTIES_LINE_DELIMITER);
}
return rowDelimiter;
}
public static TStorageType analyzeStorageType(Map<String, String> properties) throws AnalysisException {
TStorageType tStorageType = TStorageType.COLUMN;
if (properties != null && properties.containsKey(PROPERTIES_STORAGE_TYPE)) {
String storageType = properties.get(PROPERTIES_STORAGE_TYPE);
if (storageType.equalsIgnoreCase(TStorageType.COLUMN.name())) {
tStorageType = TStorageType.COLUMN;
} else {
throw new AnalysisException("Invalid storage type: " + storageType);
}
properties.remove(PROPERTIES_STORAGE_TYPE);
}
return tStorageType;
}
public static TTabletType analyzeTabletType(Map<String, String> properties) throws AnalysisException {
TTabletType tTabletType = TTabletType.TABLET_TYPE_DISK;
if (properties != null && properties.containsKey(PROPERTIES_TABLET_TYPE)) {
String tabletType = properties.get(PROPERTIES_TABLET_TYPE);
if (tabletType.equalsIgnoreCase("memory")) {
tTabletType = TTabletType.TABLET_TYPE_MEMORY;
} else if (tabletType.equalsIgnoreCase("disk")) {
tTabletType = TTabletType.TABLET_TYPE_DISK;
} else {
throw new AnalysisException(("Invalid tablet type"));
}
properties.remove(PROPERTIES_TABLET_TYPE);
}
return tTabletType;
}
public static Long analyzeVersionInfo(Map<String, String> properties) throws AnalysisException {
long versionInfo = Partition.PARTITION_INIT_VERSION;
if (properties != null && properties.containsKey(PROPERTIES_VERSION_INFO)) {
String versionInfoStr = properties.get(PROPERTIES_VERSION_INFO);
try {
versionInfo = Long.parseLong(versionInfoStr);
} catch (NumberFormatException e) {
throw new AnalysisException("version info format error.");
}
properties.remove(PROPERTIES_VERSION_INFO);
}
return versionInfo;
}
public static int analyzeSchemaVersion(Map<String, String> properties) throws AnalysisException {
int schemaVersion = 0;
if (properties != null && properties.containsKey(PROPERTIES_SCHEMA_VERSION)) {
String schemaVersionStr = properties.get(PROPERTIES_SCHEMA_VERSION);
try {
schemaVersion = Integer.parseInt(schemaVersionStr);
} catch (Exception e) {
throw new AnalysisException("schema version format error");
}
properties.remove(PROPERTIES_SCHEMA_VERSION);
}
return schemaVersion;
}
public static Set<String> analyzeBloomFilterColumns(Map<String, String> properties, List<Column> columns,
boolean isPrimaryKey) throws AnalysisException {
Set<String> bfColumns = null;
if (properties != null && properties.containsKey(PROPERTIES_BF_COLUMNS)) {
bfColumns = Sets.newHashSet();
String bfColumnsStr = properties.get(PROPERTIES_BF_COLUMNS);
if (Strings.isNullOrEmpty(bfColumnsStr)) {
return bfColumns;
}
String[] bfColumnArr = bfColumnsStr.split(COMMA_SEPARATOR);
Set<String> bfColumnSet = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);
for (String bfColumn : bfColumnArr) {
bfColumn = bfColumn.trim();
String finalBfColumn = bfColumn;
Column column = columns.stream().filter(col -> col.getName().equalsIgnoreCase(finalBfColumn))
.findFirst()
.orElse(null);
if (column == null) {
throw new AnalysisException(
String.format("Invalid bloom filter column '%s': not exists", bfColumn));
}
Type type = column.getType();
if (!type.supportBloomFilter()) {
throw new AnalysisException(String.format("Invalid bloom filter column '%s': unsupported type %s",
bfColumn, type));
}
if (!(column.isKey() || isPrimaryKey || column.getAggregationType() == AggregateType.NONE)) {
throw new AnalysisException("Bloom filter index only used in columns of DUP_KEYS/PRIMARY table or "
+ "key columns of UNIQUE_KEYS/AGG_KEYS table. invalid column: " + bfColumn);
}
if (bfColumnSet.contains(bfColumn)) {
throw new AnalysisException(String.format("Duplicate bloom filter column '%s'", bfColumn));
}
bfColumnSet.add(bfColumn);
bfColumns.add(column.getName());
}
properties.remove(PROPERTIES_BF_COLUMNS);
}
return bfColumns;
}
public static double analyzeBloomFilterFpp(Map<String, String> properties) throws AnalysisException {
double bfFpp = 0;
if (properties != null && properties.containsKey(PROPERTIES_BF_FPP)) {
String bfFppStr = properties.get(PROPERTIES_BF_FPP);
try {
bfFpp = Double.parseDouble(bfFppStr);
} catch (NumberFormatException e) {
throw new AnalysisException("Bloom filter fpp is not Double");
}
if (bfFpp < MIN_FPP || bfFpp > MAX_FPP) {
throw new AnalysisException("Bloom filter fpp should in [" + MIN_FPP + ", " + MAX_FPP + "]");
}
properties.remove(PROPERTIES_BF_FPP);
}
return bfFpp;
}
public static String analyzeColocate(Map<String, String> properties) throws AnalysisException {
String colocateGroup = null;
if (properties != null && properties.containsKey(PROPERTIES_COLOCATE_WITH)) {
colocateGroup = properties.get(PROPERTIES_COLOCATE_WITH);
properties.remove(PROPERTIES_COLOCATE_WITH);
}
return colocateGroup;
}
public static long analyzeTimeout(Map<String, String> properties, long defaultTimeout) throws AnalysisException {
long timeout = defaultTimeout;
if (properties != null && properties.containsKey(PROPERTIES_TIMEOUT)) {
String timeoutStr = properties.get(PROPERTIES_TIMEOUT);
try {
timeout = Long.parseLong(timeoutStr);
} catch (NumberFormatException e) {
throw new AnalysisException("Invalid timeout format: " + timeoutStr);
}
properties.remove(PROPERTIES_TIMEOUT);
}
return timeout;
}
public static TStorageFormat analyzeStorageFormat(Map<String, String> properties) throws AnalysisException {
String storageFormat;
if (properties != null && properties.containsKey(PROPERTIES_STORAGE_FORMAT)) {
storageFormat = properties.get(PROPERTIES_STORAGE_FORMAT);
properties.remove(PROPERTIES_STORAGE_FORMAT);
} else {
return TStorageFormat.DEFAULT;
}
if (storageFormat.equalsIgnoreCase("v1")) {
return TStorageFormat.V1;
} else if (storageFormat.equalsIgnoreCase("v2")) {
return TStorageFormat.V2;
} else if (storageFormat.equalsIgnoreCase("default")) {
return TStorageFormat.DEFAULT;
} else {
throw new AnalysisException("unknown storage format: " + storageFormat);
}
}
public static TCompressionType analyzeCompressionType(Map<String, String> properties) throws AnalysisException {
String compressionType;
if (properties == null || !properties.containsKey(PROPERTIES_COMPRESSION)) {
return TCompressionType.LZ4_FRAME;
}
compressionType = properties.get(PROPERTIES_COMPRESSION);
properties.remove(PROPERTIES_COMPRESSION);
if (CompressionUtils.getCompressTypeByName(compressionType) != null) {
return CompressionUtils.getCompressTypeByName(compressionType);
} else {
throw new AnalysisException("unknown compression type: " + compressionType);
}
}
public static String analyzeWriteQuorum(Map<String, String> properties) throws AnalysisException {
String writeQuorum;
if (properties == null || !properties.containsKey(PROPERTIES_WRITE_QUORUM)) {
return WriteQuorum.MAJORITY;
}
writeQuorum = properties.get(PROPERTIES_WRITE_QUORUM);
properties.remove(PROPERTIES_WRITE_QUORUM);
if (WriteQuorum.findTWriteQuorumByName(writeQuorum) != null) {
return writeQuorum;
} else {
throw new AnalysisException("unknown write quorum: " + writeQuorum);
}
}
public static boolean analyzeBooleanProp(Map<String, String> properties, String propKey, boolean defaultVal) {
if (properties != null && properties.containsKey(propKey)) {
String val = properties.get(propKey);
properties.remove(propKey);
return Boolean.parseBoolean(val);
}
return defaultVal;
}
public static String analyzeType(Map<String, String> properties) {
String type = null;
if (properties != null && properties.containsKey(PROPERTIES_TYPE)) {
type = properties.get(PROPERTIES_TYPE);
properties.remove(PROPERTIES_TYPE);
}
return type;
}
public static long analyzeLongProp(Map<String, String> properties, String propKey, long defaultVal)
throws AnalysisException {
long val = defaultVal;
if (properties != null && properties.containsKey(propKey)) {
String valStr = properties.get(propKey);
try {
val = Long.parseLong(valStr);
} catch (NumberFormatException e) {
throw new AnalysisException("Invalid " + propKey + " format: " + valStr);
}
properties.remove(propKey);
}
return val;
}
} |
this logic is weird. why, if the object doesn't `hasProperties`, use `PathQueryDelimiter`, and `QueryDelimiter` otherwise? | public String enhanceConnectionString(Map<String, String> enhancedProperties) {
if (enhancedProperties == null || enhancedProperties.isEmpty()) {
return this.jdbcURL;
}
LOGGER.debug("Trying to enhance url for {}", databaseType);
StringBuilder builder = new StringBuilder(this.jdbcURL);
if (!this.hasProperties()) {
builder.append(databaseType.getPathQueryDelimiter());
} else {
builder.append(databaseType.getQueryDelimiter());
}
for (Map.Entry<String, String> entry : enhancedProperties.entrySet()) {
String key = entry.getKey(), value = entry.getValue();
String valueProvidedInConnectionString = this.getProperty(key);
if (valueProvidedInConnectionString == null) {
builder.append(key)
.append("=")
.append(value)
.append(databaseType.getQueryDelimiter());
} else if (!value.equals(valueProvidedInConnectionString)) {
LOGGER.debug("The property {} is set to another value than default {}", key, value);
throw new IllegalArgumentException("Inconsistent property detected");
} else {
LOGGER.debug("The property {} is already set", key);
}
}
String enhancedUrl = builder.toString();
return enhancedUrl.substring(0, enhancedUrl.length() - 1);
} | if (!this.hasProperties()) { | public String enhanceConnectionString(Map<String, String> enhancedProperties) {
if (enhancedProperties == null || enhancedProperties.isEmpty()) {
return this.jdbcURL;
}
LOGGER.debug("Trying to enhance jdbc url for {}", databaseType);
StringBuilder builder = new StringBuilder(this.jdbcURL);
if (!this.hasProperties()) {
builder.append(databaseType.getPathQueryDelimiter());
} else {
builder.append(databaseType.getQueryDelimiter());
}
for (Map.Entry<String, String> entry : enhancedProperties.entrySet()) {
String key = entry.getKey(), value = entry.getValue();
String valueProvidedInConnectionString = this.getProperty(key);
if (valueProvidedInConnectionString == null) {
builder.append(key)
.append("=")
.append(value)
.append(databaseType.getQueryDelimiter());
} else if (!value.equals(valueProvidedInConnectionString)) {
LOGGER.debug("The property {} is set to another value than default {}", key, value);
throw new IllegalArgumentException("Inconsistent property detected");
} else {
LOGGER.debug("The property {} is already set", key);
}
}
String enhancedUrl = builder.toString();
return enhancedUrl.substring(0, enhancedUrl.length() - 1);
} | class JdbcConnectionString {
private static final Logger LOGGER = LoggerFactory.getLogger(JdbcConnectionString.class);
public static final String INVALID_CONNECTION_STRING_FORMAT = "Invalid connection string: %s";
public static final String UNSUPPORTED_DATABASE_TYPE_STRING_FORMAT = "The DatabaseType specified in : %s is not "
+ "supported to enhance authentication with Azure AD by Spring Cloud Azure.";
public static final String INVALID_PROPERTY_PAIR_FORMAT = "Connection string has invalid key value pair: %s";
private static final String TOKEN_VALUE_SEPARATOR = "=";
private final String jdbcURL;
private final Map<String, String> properties = new HashMap<>();
private DatabaseType databaseType = null;
private JdbcConnectionString(String jdbcURL) {
this.jdbcURL = jdbcURL;
}
private void resolveSegments() {
if (!StringUtils.hasText(this.jdbcURL)) {
LOGGER.warn("'connectionString' doesn't have text.");
throw new IllegalArgumentException(String.format(INVALID_CONNECTION_STRING_FORMAT, this.jdbcURL));
}
Optional<DatabaseType> optionalDatabaseType = Arrays.stream(DatabaseType.values())
.filter(databaseType -> this.jdbcURL.startsWith(databaseType.getSchema()))
.findAny();
this.databaseType = optionalDatabaseType.orElseThrow(() -> new AzureUnsupportedDatabaseTypeException(String.format(UNSUPPORTED_DATABASE_TYPE_STRING_FORMAT, this.jdbcURL)));
int pathQueryDelimiterIndex = this.jdbcURL.indexOf(this.databaseType.getPathQueryDelimiter());
if (pathQueryDelimiterIndex < 0) {
return;
}
String hostInfo = this.jdbcURL.substring(databaseType.getSchema().length() + 3, pathQueryDelimiterIndex);
String[] hostInfoArray = hostInfo.split(":");
if (hostInfoArray.length == 2) {
this.properties.put("servername", hostInfoArray[0]);
this.properties.put("port", hostInfoArray[1]);
} else {
this.properties.put("port", hostInfo);
}
String properties = this.jdbcURL.substring(pathQueryDelimiterIndex + 1);
final String[] tokenValuePairs = properties.split(this.databaseType.getQueryDelimiter());
for (String tokenValuePair : tokenValuePairs) {
final String[] pair = tokenValuePair.split(TOKEN_VALUE_SEPARATOR, 2);
String key = pair[0];
if (!StringUtils.hasText(pair[0])) {
throw new IllegalArgumentException(String.format(INVALID_PROPERTY_PAIR_FORMAT, tokenValuePair));
}
if (pair.length < 2) {
this.properties.put(key, NONE_VALUE);
} else {
this.properties.put(key, pair[1]);
}
}
}
public String getProperty(String key) {
return this.properties.get(key);
}
public DatabaseType getDatabaseType() {
return databaseType;
}
public boolean hasProperties() {
return !this.properties.isEmpty();
}
public static JdbcConnectionString resolve(String url) {
JdbcConnectionString jdbcConnectionString = new JdbcConnectionString(url);
try {
jdbcConnectionString.resolveSegments();
} catch (AzureUnsupportedDatabaseTypeException e) {
LOGGER.debug(e.getMessage());
return null;
}
return jdbcConnectionString;
}
} | class JdbcConnectionString {
private static final Logger LOGGER = LoggerFactory.getLogger(JdbcConnectionString.class);
public static final String INVALID_CONNECTION_STRING_FORMAT = "Invalid connection string: %s";
public static final String UNSUPPORTED_DATABASE_TYPE_STRING_FORMAT = "The DatabaseType specified in : %s is not "
+ "supported to enhance authentication with Azure AD by Spring Cloud Azure.";
public static final String INVALID_PROPERTY_PAIR_FORMAT = "Connection string has invalid key value pair: %s";
private static final String TOKEN_VALUE_SEPARATOR = "=";
private final String jdbcURL;
private final Map<String, String> properties = new HashMap<>();
private DatabaseType databaseType = null;
private JdbcConnectionString(String jdbcURL) {
this.jdbcURL = jdbcURL;
}
private void resolveSegments() {
if (!StringUtils.hasText(this.jdbcURL)) {
LOGGER.warn("'connectionString' doesn't have text.");
throw new IllegalArgumentException(String.format(INVALID_CONNECTION_STRING_FORMAT, this.jdbcURL));
}
Optional<DatabaseType> optionalDatabaseType = Arrays.stream(DatabaseType.values())
.filter(databaseType -> this.jdbcURL.startsWith(databaseType.getSchema()))
.findAny();
this.databaseType = optionalDatabaseType.orElseThrow(() -> new AzureUnsupportedDatabaseTypeException(String.format(UNSUPPORTED_DATABASE_TYPE_STRING_FORMAT, this.jdbcURL)));
int pathQueryDelimiterIndex = this.jdbcURL.indexOf(this.databaseType.getPathQueryDelimiter());
if (pathQueryDelimiterIndex < 0) {
return;
}
String hostInfo = this.jdbcURL.substring(databaseType.getSchema().length() + 3, pathQueryDelimiterIndex);
String[] hostInfoArray = hostInfo.split(":");
if (hostInfoArray.length == 2) {
this.properties.put("servername", hostInfoArray[0]);
this.properties.put("port", hostInfoArray[1]);
} else {
this.properties.put("servername", hostInfo);
}
String properties = this.jdbcURL.substring(pathQueryDelimiterIndex + 1);
final String[] tokenValuePairs = properties.split(this.databaseType.getQueryDelimiter());
for (String tokenValuePair : tokenValuePairs) {
final String[] pair = tokenValuePair.split(TOKEN_VALUE_SEPARATOR, 2);
String key = pair[0];
if (!StringUtils.hasText(pair[0])) {
throw new IllegalArgumentException(String.format(INVALID_PROPERTY_PAIR_FORMAT, tokenValuePair));
}
if (pair.length < 2) {
this.properties.put(key, NONE_VALUE);
} else {
this.properties.put(key, pair[1]);
}
}
}
public String getProperty(String key) {
return this.properties.get(key);
}
public DatabaseType getDatabaseType() {
return databaseType;
}
public boolean hasProperties() {
return !this.properties.isEmpty();
}
public static JdbcConnectionString resolve(String url) {
JdbcConnectionString jdbcConnectionString = new JdbcConnectionString(url);
try {
jdbcConnectionString.resolveSegments();
} catch (AzureUnsupportedDatabaseTypeException e) {
LOGGER.debug(e.getMessage());
return null;
}
return jdbcConnectionString;
}
} |
Agree, these tests should be rewritten to use DeploymentTester. | public void includes_application_endpoint_when_declared() {
Instance instance = new Instance(ApplicationId.from("t1", "a1", "default"));
ZoneId zone1 = ZoneId.from(Environment.prod, RegionName.from("aws-us-east-1c"));
ZoneId zone2 = ZoneId.from(Environment.prod, RegionName.from("aws-us-west-2a"));
ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
.instances("beta,main")
.region(zone1.region())
.region(zone2.region())
.applicationEndpoint("a", "qrs", zone2.region().value(),
Map.of(InstanceName.from("beta"), 2,
InstanceName.from("main"), 8))
.applicationEndpoint("b", "qrs", zone2.region().value(),
Map.of(InstanceName.from("beta"), 1,
InstanceName.from("main"), 1))
.applicationEndpoint("c", "qrs", zone1.region().value(),
Map.of(InstanceName.from("beta"), 4,
InstanceName.from("main"), 6))
.build();
ControllerTester tester = new ControllerTester(SystemName.Public);
EndpointCertificateValidatorImpl endpointCertificateValidator = new EndpointCertificateValidatorImpl(secretStore, clock);
EndpointCertificates endpointCertificates = new EndpointCertificates(tester.controller(), endpointCertificateMock, endpointCertificateValidator);
List<String> expectedSans = List.of(
"vlfms2wpoa4nyrka2s5lktucypjtxkqhv.internal.vespa-app.cloud",
"a1.t1.g.vespa-app.cloud",
"*.a1.t1.g.vespa-app.cloud",
"a1.t1.aws-us-west-2a.r.vespa-app.cloud",
"*.a1.t1.aws-us-west-2a.r.vespa-app.cloud",
"a1.t1.aws-us-east-1c.r.vespa-app.cloud",
"*.a1.t1.aws-us-east-1c.r.vespa-app.cloud",
"a1.t1.aws-us-east-1c.z.vespa-app.cloud",
"*.a1.t1.aws-us-east-1c.z.vespa-app.cloud",
"a1.t1.aws-us-east-1c.test.z.vespa-app.cloud",
"*.a1.t1.aws-us-east-1c.test.z.vespa-app.cloud",
"a1.t1.aws-us-east-1c.staging.z.vespa-app.cloud",
"*.a1.t1.aws-us-east-1c.staging.z.vespa-app.cloud"
);
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(instance, zone1, applicationPackage.deploymentSpec());
assertTrue(endpointCertificateMetadata.isPresent());
assertTrue(endpointCertificateMetadata.get().keyName().matches("vespa.tls.t1.a1.*-key"));
assertTrue(endpointCertificateMetadata.get().certName().matches("vespa.tls.t1.a1.*-cert"));
assertEquals(0, endpointCertificateMetadata.get().version());
assertEquals(expectedSans, endpointCertificateMetadata.get().requestedDnsSans());
} | List<String> expectedSans = List.of( | public void includes_application_endpoint_when_declared() {
Instance instance = new Instance(ApplicationId.from("t1", "a1", "default"));
ZoneId zone1 = ZoneId.from(Environment.prod, RegionName.from("aws-us-east-1c"));
ZoneId zone2 = ZoneId.from(Environment.prod, RegionName.from("aws-us-west-2a"));
ApplicationPackage applicationPackage = new ApplicationPackageBuilder()
.instances("beta,main")
.region(zone1.region())
.region(zone2.region())
.applicationEndpoint("a", "qrs", zone2.region().value(),
Map.of(InstanceName.from("beta"), 2,
InstanceName.from("main"), 8))
.applicationEndpoint("b", "qrs", zone2.region().value(),
Map.of(InstanceName.from("beta"), 1,
InstanceName.from("main"), 1))
.applicationEndpoint("c", "qrs", zone1.region().value(),
Map.of(InstanceName.from("beta"), 4,
InstanceName.from("main"), 6))
.build();
ControllerTester tester = new ControllerTester(SystemName.Public);
EndpointCertificateValidatorImpl endpointCertificateValidator = new EndpointCertificateValidatorImpl(secretStore, clock);
EndpointCertificates endpointCertificates = new EndpointCertificates(tester.controller(), endpointCertificateMock, endpointCertificateValidator);
List<String> expectedSans = List.of(
"vlfms2wpoa4nyrka2s5lktucypjtxkqhv.internal.vespa-app.cloud",
"a1.t1.g.vespa-app.cloud",
"*.a1.t1.g.vespa-app.cloud",
"a1.t1.aws-us-west-2a.r.vespa-app.cloud",
"*.a1.t1.aws-us-west-2a.r.vespa-app.cloud",
"a1.t1.aws-us-east-1c.r.vespa-app.cloud",
"*.a1.t1.aws-us-east-1c.r.vespa-app.cloud",
"a1.t1.aws-us-east-1c.z.vespa-app.cloud",
"*.a1.t1.aws-us-east-1c.z.vespa-app.cloud",
"a1.t1.aws-us-east-1c.test.z.vespa-app.cloud",
"*.a1.t1.aws-us-east-1c.test.z.vespa-app.cloud",
"a1.t1.aws-us-east-1c.staging.z.vespa-app.cloud",
"*.a1.t1.aws-us-east-1c.staging.z.vespa-app.cloud"
);
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(instance, zone1, applicationPackage.deploymentSpec());
assertTrue(endpointCertificateMetadata.isPresent());
assertTrue(endpointCertificateMetadata.get().keyName().matches("vespa.tls.t1.a1.*-key"));
assertTrue(endpointCertificateMetadata.get().certName().matches("vespa.tls.t1.a1.*-cert"));
assertEquals(0, endpointCertificateMetadata.get().version());
assertEquals(expectedSans, endpointCertificateMetadata.get().requestedDnsSans());
} | class EndpointCertificatesTest {
private final ControllerTester tester = new ControllerTester();
private final SecretStoreMock secretStore = new SecretStoreMock();
private final CuratorDb mockCuratorDb = tester.curator();
private final ManualClock clock = tester.clock();
private final EndpointCertificateMock endpointCertificateMock = new EndpointCertificateMock();
private final EndpointCertificateValidatorImpl endpointCertificateValidator = new EndpointCertificateValidatorImpl(secretStore, clock);
private final EndpointCertificates endpointCertificates = new EndpointCertificates(tester.controller(), endpointCertificateMock, endpointCertificateValidator);
private final KeyPair testKeyPair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 192);
private X509Certificate testCertificate;
private X509Certificate testCertificate2;
private static final List<String> expectedSans = List.of(
"vt2ktgkqme5zlnp4tj4ttyor7fj3v7q5o.vespa.oath.cloud",
"default.default.global.vespa.oath.cloud",
"*.default.default.global.vespa.oath.cloud",
"default.default.aws-us-east-1a.vespa.oath.cloud",
"*.default.default.aws-us-east-1a.vespa.oath.cloud",
"default.default.us-east-1.test.vespa.oath.cloud",
"*.default.default.us-east-1.test.vespa.oath.cloud",
"default.default.us-east-3.staging.vespa.oath.cloud",
"*.default.default.us-east-3.staging.vespa.oath.cloud"
);
private static final List<String> expectedAdditionalSans = List.of(
"default.default.ap-northeast-1.vespa.oath.cloud",
"*.default.default.ap-northeast-1.vespa.oath.cloud"
);
private static final List<String> expectedCombinedSans = new ArrayList<>() {{
addAll(expectedSans);
addAll(expectedAdditionalSans);
}};
private static final List<String> expectedDevSans = List.of(
"vt2ktgkqme5zlnp4tj4ttyor7fj3v7q5o.vespa.oath.cloud",
"default.default.us-east-1.dev.vespa.oath.cloud",
"*.default.default.us-east-1.dev.vespa.oath.cloud"
);
private X509Certificate makeTestCert(List<String> sans) {
X509CertificateBuilder x509CertificateBuilder = X509CertificateBuilder
.fromKeypair(
testKeyPair,
new X500Principal("CN=test"),
clock.instant(), clock.instant().plus(5, ChronoUnit.MINUTES),
SignatureAlgorithm.SHA256_WITH_ECDSA,
X509CertificateBuilder.generateRandomSerialNumber());
for (String san : sans) x509CertificateBuilder = x509CertificateBuilder.addSubjectAlternativeName(san);
return x509CertificateBuilder.build();
}
private final Instance testInstance = new Instance(ApplicationId.defaultId());
private final String testKeyName = "testKeyName";
private final String testCertName = "testCertName";
private ZoneId testZone;
@Before
public void setUp() {
tester.zoneRegistry().exclusiveRoutingIn(tester.zoneRegistry().zones().all().zones());
testZone = tester.zoneRegistry().zones().routingMethod(RoutingMethod.exclusive).in(Environment.prod).zones().stream().findFirst().orElseThrow().getId();
clock.setInstant(Instant.EPOCH);
testCertificate = makeTestCert(expectedSans);
testCertificate2 = makeTestCert(expectedCombinedSans);
}
@Test
public void provisions_new_certificate_in_dev() {
ZoneId testZone = tester.zoneRegistry().zones().routingMethod(RoutingMethod.exclusive).in(Environment.dev).zones().stream().findFirst().orElseThrow().getId();
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(testInstance, testZone, DeploymentSpec.empty);
assertTrue(endpointCertificateMetadata.isPresent());
assertTrue(endpointCertificateMetadata.get().keyName().matches("vespa.tls.default.default.*-key"));
assertTrue(endpointCertificateMetadata.get().certName().matches("vespa.tls.default.default.*-cert"));
assertEquals(0, endpointCertificateMetadata.get().version());
assertEquals(expectedDevSans, endpointCertificateMetadata.get().requestedDnsSans());
}
@Test
public void provisions_new_certificate_in_prod() {
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(testInstance, testZone, DeploymentSpec.empty);
assertTrue(endpointCertificateMetadata.isPresent());
assertTrue(endpointCertificateMetadata.get().keyName().matches("vespa.tls.default.default.*-key"));
assertTrue(endpointCertificateMetadata.get().certName().matches("vespa.tls.default.default.*-cert"));
assertEquals(0, endpointCertificateMetadata.get().version());
assertEquals(expectedSans, endpointCertificateMetadata.get().requestedDnsSans());
}
@Test
public void provisions_new_certificate_in_public_prod() {
ControllerTester tester = new ControllerTester(SystemName.Public);
EndpointCertificateValidatorImpl endpointCertificateValidator = new EndpointCertificateValidatorImpl(secretStore, clock);
EndpointCertificates endpointCertificates = new EndpointCertificates(tester.controller(), endpointCertificateMock, endpointCertificateValidator);
List<String> expectedSans = List.of(
"vt2ktgkqme5zlnp4tj4ttyor7fj3v7q5o.internal.vespa-app.cloud",
"default.default.g.vespa-app.cloud",
"*.default.default.g.vespa-app.cloud",
"default.default.aws-us-east-1a.z.vespa-app.cloud",
"*.default.default.aws-us-east-1a.z.vespa-app.cloud",
"default.default.aws-us-east-1c.test.z.vespa-app.cloud",
"*.default.default.aws-us-east-1c.test.z.vespa-app.cloud",
"default.default.aws-us-east-1c.staging.z.vespa-app.cloud",
"*.default.default.aws-us-east-1c.staging.z.vespa-app.cloud"
);
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(testInstance, testZone, DeploymentSpec.empty);
assertTrue(endpointCertificateMetadata.isPresent());
assertTrue(endpointCertificateMetadata.get().keyName().matches("vespa.tls.default.default.*-key"));
assertTrue(endpointCertificateMetadata.get().certName().matches("vespa.tls.default.default.*-cert"));
assertEquals(0, endpointCertificateMetadata.get().version());
assertEquals(expectedSans, endpointCertificateMetadata.get().requestedDnsSans());
}
@Test
public void reuses_stored_certificate_metadata() {
mockCuratorDb.writeEndpointCertificateMetadata(testInstance.id(), new EndpointCertificateMetadata(testKeyName, testCertName, 7, 0, "request_id",
List.of("vt2ktgkqme5zlnp4tj4ttyor7fj3v7q5o.vespa.oath.cloud",
"default.default.global.vespa.oath.cloud",
"*.default.default.global.vespa.oath.cloud",
"default.default.aws-us-east-1a.vespa.oath.cloud",
"*.default.default.aws-us-east-1a.vespa.oath.cloud"),
"", Optional.empty(), Optional.empty()));
secretStore.setSecret(testKeyName, KeyUtils.toPem(testKeyPair.getPrivate()), 7);
secretStore.setSecret(testCertName, X509CertificateUtils.toPem(testCertificate) + X509CertificateUtils.toPem(testCertificate), 7);
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(testInstance, testZone, DeploymentSpec.empty);
assertTrue(endpointCertificateMetadata.isPresent());
assertEquals(testKeyName, endpointCertificateMetadata.get().keyName());
assertEquals(testCertName, endpointCertificateMetadata.get().certName());
assertEquals(7, endpointCertificateMetadata.get().version());
}
@Test
public void reprovisions_certificate_when_necessary() {
mockCuratorDb.writeEndpointCertificateMetadata(testInstance.id(), new EndpointCertificateMetadata(testKeyName, testCertName, -1, 0, "uuid", List.of(), "issuer", Optional.empty(), Optional.empty()));
secretStore.setSecret("vespa.tls.default.default.default-key", KeyUtils.toPem(testKeyPair.getPrivate()), 0);
secretStore.setSecret("vespa.tls.default.default.default-cert", X509CertificateUtils.toPem(testCertificate) + X509CertificateUtils.toPem(testCertificate), 0);
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(testInstance, testZone, DeploymentSpec.empty);
assertTrue(endpointCertificateMetadata.isPresent());
assertEquals(0, endpointCertificateMetadata.get().version());
assertEquals(endpointCertificateMetadata, mockCuratorDb.readEndpointCertificateMetadata(testInstance.id()));
}
@Test
public void reprovisions_certificate_with_added_sans_when_deploying_to_new_zone() {
ZoneId testZone = tester.zoneRegistry().zones().routingMethod(RoutingMethod.exclusive).in(Environment.prod).zones().stream().skip(1).findFirst().orElseThrow().getId();
mockCuratorDb.writeEndpointCertificateMetadata(testInstance.id(), new EndpointCertificateMetadata(testKeyName, testCertName, -1, 0, "original-request-uuid", expectedSans, "mockCa", Optional.empty(), Optional.empty()));
secretStore.setSecret("vespa.tls.default.default.default-key", KeyUtils.toPem(testKeyPair.getPrivate()), -1);
secretStore.setSecret("vespa.tls.default.default.default-cert", X509CertificateUtils.toPem(testCertificate) + X509CertificateUtils.toPem(testCertificate), -1);
secretStore.setSecret("vespa.tls.default.default.default-key", KeyUtils.toPem(testKeyPair.getPrivate()), 0);
secretStore.setSecret("vespa.tls.default.default.default-cert", X509CertificateUtils.toPem(testCertificate2) + X509CertificateUtils.toPem(testCertificate2), 0);
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(testInstance, testZone, DeploymentSpec.empty);
assertTrue(endpointCertificateMetadata.isPresent());
assertEquals(0, endpointCertificateMetadata.get().version());
assertEquals(endpointCertificateMetadata, mockCuratorDb.readEndpointCertificateMetadata(testInstance.id()));
assertEquals("original-request-uuid", endpointCertificateMetadata.get().requestId());
assertEquals(Set.copyOf(expectedCombinedSans), Set.copyOf(endpointCertificateMetadata.get().requestedDnsSans()));
}
@Test
public void includes_zones_in_deployment_spec_when_deploying_to_staging() {
DeploymentSpec deploymentSpec = new DeploymentSpecXmlReader(true).read(
"<deployment version=\"1.0\">\n" +
" <instance id=\"default\">\n" +
" <prod>\n" +
" <region active=\"true\">aws-us-east-1a</region>\n" +
" <region active=\"true\">ap-northeast-1</region>\n" +
" </prod>\n" +
" </instance>\n" +
"</deployment>\n");
ZoneId testZone = tester.zoneRegistry().zones().all().in(Environment.staging).zones().stream().findFirst().orElseThrow().getId();
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(testInstance, testZone, deploymentSpec);
assertTrue(endpointCertificateMetadata.isPresent());
assertTrue(endpointCertificateMetadata.get().keyName().matches("vespa.tls.default.default.*-key"));
assertTrue(endpointCertificateMetadata.get().certName().matches("vespa.tls.default.default.*-cert"));
assertEquals(0, endpointCertificateMetadata.get().version());
assertEquals(Set.copyOf(expectedCombinedSans), Set.copyOf(endpointCertificateMetadata.get().requestedDnsSans()));
}
@Test
} | class EndpointCertificatesTest {
private final ControllerTester tester = new ControllerTester();
private final SecretStoreMock secretStore = new SecretStoreMock();
private final CuratorDb mockCuratorDb = tester.curator();
private final ManualClock clock = tester.clock();
private final EndpointCertificateMock endpointCertificateMock = new EndpointCertificateMock();
private final EndpointCertificateValidatorImpl endpointCertificateValidator = new EndpointCertificateValidatorImpl(secretStore, clock);
private final EndpointCertificates endpointCertificates = new EndpointCertificates(tester.controller(), endpointCertificateMock, endpointCertificateValidator);
private final KeyPair testKeyPair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 192);
private X509Certificate testCertificate;
private X509Certificate testCertificate2;
private static final List<String> expectedSans = List.of(
"vt2ktgkqme5zlnp4tj4ttyor7fj3v7q5o.vespa.oath.cloud",
"default.default.global.vespa.oath.cloud",
"*.default.default.global.vespa.oath.cloud",
"default.default.aws-us-east-1a.vespa.oath.cloud",
"*.default.default.aws-us-east-1a.vespa.oath.cloud",
"default.default.us-east-1.test.vespa.oath.cloud",
"*.default.default.us-east-1.test.vespa.oath.cloud",
"default.default.us-east-3.staging.vespa.oath.cloud",
"*.default.default.us-east-3.staging.vespa.oath.cloud"
);
private static final List<String> expectedAdditionalSans = List.of(
"default.default.ap-northeast-1.vespa.oath.cloud",
"*.default.default.ap-northeast-1.vespa.oath.cloud"
);
private static final List<String> expectedCombinedSans = new ArrayList<>() {{
addAll(expectedSans);
addAll(expectedAdditionalSans);
}};
private static final List<String> expectedDevSans = List.of(
"vt2ktgkqme5zlnp4tj4ttyor7fj3v7q5o.vespa.oath.cloud",
"default.default.us-east-1.dev.vespa.oath.cloud",
"*.default.default.us-east-1.dev.vespa.oath.cloud"
);
private X509Certificate makeTestCert(List<String> sans) {
X509CertificateBuilder x509CertificateBuilder = X509CertificateBuilder
.fromKeypair(
testKeyPair,
new X500Principal("CN=test"),
clock.instant(), clock.instant().plus(5, ChronoUnit.MINUTES),
SignatureAlgorithm.SHA256_WITH_ECDSA,
X509CertificateBuilder.generateRandomSerialNumber());
for (String san : sans) x509CertificateBuilder = x509CertificateBuilder.addSubjectAlternativeName(san);
return x509CertificateBuilder.build();
}
private final Instance testInstance = new Instance(ApplicationId.defaultId());
private final String testKeyName = "testKeyName";
private final String testCertName = "testCertName";
private ZoneId testZone;
@Before
public void setUp() {
tester.zoneRegistry().exclusiveRoutingIn(tester.zoneRegistry().zones().all().zones());
testZone = tester.zoneRegistry().zones().routingMethod(RoutingMethod.exclusive).in(Environment.prod).zones().stream().findFirst().orElseThrow().getId();
clock.setInstant(Instant.EPOCH);
testCertificate = makeTestCert(expectedSans);
testCertificate2 = makeTestCert(expectedCombinedSans);
}
@Test
public void provisions_new_certificate_in_dev() {
ZoneId testZone = tester.zoneRegistry().zones().routingMethod(RoutingMethod.exclusive).in(Environment.dev).zones().stream().findFirst().orElseThrow().getId();
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(testInstance, testZone, DeploymentSpec.empty);
assertTrue(endpointCertificateMetadata.isPresent());
assertTrue(endpointCertificateMetadata.get().keyName().matches("vespa.tls.default.default.*-key"));
assertTrue(endpointCertificateMetadata.get().certName().matches("vespa.tls.default.default.*-cert"));
assertEquals(0, endpointCertificateMetadata.get().version());
assertEquals(expectedDevSans, endpointCertificateMetadata.get().requestedDnsSans());
}
@Test
public void provisions_new_certificate_in_prod() {
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(testInstance, testZone, DeploymentSpec.empty);
assertTrue(endpointCertificateMetadata.isPresent());
assertTrue(endpointCertificateMetadata.get().keyName().matches("vespa.tls.default.default.*-key"));
assertTrue(endpointCertificateMetadata.get().certName().matches("vespa.tls.default.default.*-cert"));
assertEquals(0, endpointCertificateMetadata.get().version());
assertEquals(expectedSans, endpointCertificateMetadata.get().requestedDnsSans());
}
@Test
public void provisions_new_certificate_in_public_prod() {
ControllerTester tester = new ControllerTester(SystemName.Public);
EndpointCertificateValidatorImpl endpointCertificateValidator = new EndpointCertificateValidatorImpl(secretStore, clock);
EndpointCertificates endpointCertificates = new EndpointCertificates(tester.controller(), endpointCertificateMock, endpointCertificateValidator);
List<String> expectedSans = List.of(
"vt2ktgkqme5zlnp4tj4ttyor7fj3v7q5o.internal.vespa-app.cloud",
"default.default.g.vespa-app.cloud",
"*.default.default.g.vespa-app.cloud",
"default.default.aws-us-east-1a.z.vespa-app.cloud",
"*.default.default.aws-us-east-1a.z.vespa-app.cloud",
"default.default.aws-us-east-1c.test.z.vespa-app.cloud",
"*.default.default.aws-us-east-1c.test.z.vespa-app.cloud",
"default.default.aws-us-east-1c.staging.z.vespa-app.cloud",
"*.default.default.aws-us-east-1c.staging.z.vespa-app.cloud"
);
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(testInstance, testZone, DeploymentSpec.empty);
assertTrue(endpointCertificateMetadata.isPresent());
assertTrue(endpointCertificateMetadata.get().keyName().matches("vespa.tls.default.default.*-key"));
assertTrue(endpointCertificateMetadata.get().certName().matches("vespa.tls.default.default.*-cert"));
assertEquals(0, endpointCertificateMetadata.get().version());
assertEquals(expectedSans, endpointCertificateMetadata.get().requestedDnsSans());
}
@Test
public void reuses_stored_certificate_metadata() {
mockCuratorDb.writeEndpointCertificateMetadata(testInstance.id(), new EndpointCertificateMetadata(testKeyName, testCertName, 7, 0, "request_id",
List.of("vt2ktgkqme5zlnp4tj4ttyor7fj3v7q5o.vespa.oath.cloud",
"default.default.global.vespa.oath.cloud",
"*.default.default.global.vespa.oath.cloud",
"default.default.aws-us-east-1a.vespa.oath.cloud",
"*.default.default.aws-us-east-1a.vespa.oath.cloud"),
"", Optional.empty(), Optional.empty()));
secretStore.setSecret(testKeyName, KeyUtils.toPem(testKeyPair.getPrivate()), 7);
secretStore.setSecret(testCertName, X509CertificateUtils.toPem(testCertificate) + X509CertificateUtils.toPem(testCertificate), 7);
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(testInstance, testZone, DeploymentSpec.empty);
assertTrue(endpointCertificateMetadata.isPresent());
assertEquals(testKeyName, endpointCertificateMetadata.get().keyName());
assertEquals(testCertName, endpointCertificateMetadata.get().certName());
assertEquals(7, endpointCertificateMetadata.get().version());
}
@Test
public void reprovisions_certificate_when_necessary() {
mockCuratorDb.writeEndpointCertificateMetadata(testInstance.id(), new EndpointCertificateMetadata(testKeyName, testCertName, -1, 0, "uuid", List.of(), "issuer", Optional.empty(), Optional.empty()));
secretStore.setSecret("vespa.tls.default.default.default-key", KeyUtils.toPem(testKeyPair.getPrivate()), 0);
secretStore.setSecret("vespa.tls.default.default.default-cert", X509CertificateUtils.toPem(testCertificate) + X509CertificateUtils.toPem(testCertificate), 0);
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(testInstance, testZone, DeploymentSpec.empty);
assertTrue(endpointCertificateMetadata.isPresent());
assertEquals(0, endpointCertificateMetadata.get().version());
assertEquals(endpointCertificateMetadata, mockCuratorDb.readEndpointCertificateMetadata(testInstance.id()));
}
@Test
public void reprovisions_certificate_with_added_sans_when_deploying_to_new_zone() {
ZoneId testZone = tester.zoneRegistry().zones().routingMethod(RoutingMethod.exclusive).in(Environment.prod).zones().stream().skip(1).findFirst().orElseThrow().getId();
mockCuratorDb.writeEndpointCertificateMetadata(testInstance.id(), new EndpointCertificateMetadata(testKeyName, testCertName, -1, 0, "original-request-uuid", expectedSans, "mockCa", Optional.empty(), Optional.empty()));
secretStore.setSecret("vespa.tls.default.default.default-key", KeyUtils.toPem(testKeyPair.getPrivate()), -1);
secretStore.setSecret("vespa.tls.default.default.default-cert", X509CertificateUtils.toPem(testCertificate) + X509CertificateUtils.toPem(testCertificate), -1);
secretStore.setSecret("vespa.tls.default.default.default-key", KeyUtils.toPem(testKeyPair.getPrivate()), 0);
secretStore.setSecret("vespa.tls.default.default.default-cert", X509CertificateUtils.toPem(testCertificate2) + X509CertificateUtils.toPem(testCertificate2), 0);
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(testInstance, testZone, DeploymentSpec.empty);
assertTrue(endpointCertificateMetadata.isPresent());
assertEquals(0, endpointCertificateMetadata.get().version());
assertEquals(endpointCertificateMetadata, mockCuratorDb.readEndpointCertificateMetadata(testInstance.id()));
assertEquals("original-request-uuid", endpointCertificateMetadata.get().requestId());
assertEquals(Set.copyOf(expectedCombinedSans), Set.copyOf(endpointCertificateMetadata.get().requestedDnsSans()));
}
@Test
public void includes_zones_in_deployment_spec_when_deploying_to_staging() {
DeploymentSpec deploymentSpec = new DeploymentSpecXmlReader(true).read(
"<deployment version=\"1.0\">\n" +
" <instance id=\"default\">\n" +
" <prod>\n" +
" <region active=\"true\">aws-us-east-1a</region>\n" +
" <region active=\"true\">ap-northeast-1</region>\n" +
" </prod>\n" +
" </instance>\n" +
"</deployment>\n");
ZoneId testZone = tester.zoneRegistry().zones().all().in(Environment.staging).zones().stream().findFirst().orElseThrow().getId();
Optional<EndpointCertificateMetadata> endpointCertificateMetadata = endpointCertificates.getMetadata(testInstance, testZone, deploymentSpec);
assertTrue(endpointCertificateMetadata.isPresent());
assertTrue(endpointCertificateMetadata.get().keyName().matches("vespa.tls.default.default.*-key"));
assertTrue(endpointCertificateMetadata.get().certName().matches("vespa.tls.default.default.*-cert"));
assertEquals(0, endpointCertificateMetadata.get().version());
assertEquals(Set.copyOf(expectedCombinedSans), Set.copyOf(endpointCertificateMetadata.get().requestedDnsSans()));
}
@Test
} |
> Now if this deferred creation actually gained something, that would be a different story. But it does not here, as far as I can see. Please correct me if I missed something. | private void setUpDeploymentConfiguration() {
if (project.getConfigurations().findByName(this.deploymentConfigurationName) == null) {
project.getConfigurations().register(this.deploymentConfigurationName, configuration -> {
Configuration enforcedPlatforms = this.getPlatformConfiguration();
configuration.extendsFrom(enforcedPlatforms);
configuration.getDependencies().addAllLater(project.provider(() -> {
ConditionalDependenciesEnabler cdEnabler =
new ConditionalDependenciesEnabler(project, mode, enforcedPlatforms);
final Collection<ExtensionDependency> allExtensions = cdEnabler.getAllExtensions();
Set<ExtensionDependency> extensions = collectFirstMetQuarkusExtensions(getRawRuntimeConfiguration(), allExtensions);
for (ExtensionDependency knownExtension : allExtensions) {
if (knownExtension.isConditional()) {
extensions.add(knownExtension);
}
}
final Set<ModuleVersionIdentifier> alreadyProcessed = new HashSet<>(extensions.size());
final DependencyHandler dependencies = project.getDependencies();
final Set<Dependency> deploymentDependencies = new HashSet<>();
for (ExtensionDependency extension : extensions) {
if (extension instanceof LocalExtensionDependency) {
LocalExtensionDependency localExtensionDependency = (LocalExtensionDependency) extension;
deploymentDependencies.add(
dependencies.project(Collections.singletonMap("path", localExtensionDependency.findDeploymentModulePath())));
} else {
if (!alreadyProcessed.add(extension.getExtensionId())) {
continue;
}
deploymentDependencies.add(dependencies.create(
extension.getDeploymentModule().getGroupId() + ":"
+ extension.getDeploymentModule().getArtifactId() + ":"
+ extension.getDeploymentModule().getVersion()));
}
}
return deploymentDependencies;
}));
});
}
} | project.getConfigurations().register(this.deploymentConfigurationName, configuration -> { | private void setUpDeploymentConfiguration() {
if (project.getConfigurations().findByName(this.deploymentConfigurationName) == null) {
project.getConfigurations().create(this.deploymentConfigurationName, configuration -> {
Configuration enforcedPlatforms = this.getPlatformConfiguration();
configuration.extendsFrom(enforcedPlatforms);
configuration.getDependencies().addAllLater(project.provider(() -> {
ConditionalDependenciesEnabler cdEnabler = new ConditionalDependenciesEnabler(project, mode,
enforcedPlatforms);
final Collection<ExtensionDependency> allExtensions = cdEnabler.getAllExtensions();
Set<ExtensionDependency> extensions = collectFirstMetQuarkusExtensions(getRawRuntimeConfiguration(),
allExtensions);
for (ExtensionDependency knownExtension : allExtensions) {
if (knownExtension.isConditional()) {
extensions.add(knownExtension);
}
}
final Set<ModuleVersionIdentifier> alreadyProcessed = new HashSet<>(extensions.size());
final DependencyHandler dependencies = project.getDependencies();
final Set<Dependency> deploymentDependencies = new HashSet<>();
for (ExtensionDependency extension : extensions) {
if (extension instanceof LocalExtensionDependency) {
LocalExtensionDependency localExtensionDependency = (LocalExtensionDependency) extension;
deploymentDependencies.add(
dependencies.project(Collections.singletonMap("path",
localExtensionDependency.findDeploymentModulePath())));
} else {
if (!alreadyProcessed.add(extension.getExtensionId())) {
continue;
}
deploymentDependencies.add(dependencies.create(
extension.getDeploymentModule().getGroupId() + ":"
+ extension.getDeploymentModule().getArtifactId() + ":"
+ extension.getDeploymentModule().getVersion()));
}
}
return deploymentDependencies;
}));
});
}
} | class ApplicationDeploymentClasspathBuilder {
private static String getRuntimeConfigName(LaunchMode mode, boolean base) {
final StringBuilder sb = new StringBuilder();
sb.append("quarkus");
if (mode == LaunchMode.DEVELOPMENT) {
sb.append("Dev");
} else if (mode == LaunchMode.TEST) {
sb.append("Test");
} else {
sb.append("Prod");
}
if (base) {
sb.append("Base");
}
sb.append("RuntimeClasspathConfiguration");
return sb.toString();
}
public static String getBaseRuntimeConfigName(LaunchMode mode) {
return getRuntimeConfigName(mode, true);
}
public static String getFinalRuntimeConfigName(LaunchMode mode) {
return getRuntimeConfigName(mode, false);
}
public static void initConfigurations(Project project) {
final ConfigurationContainer configContainer = project.getConfigurations();
configContainer.create(ToolingUtils.DEV_MODE_CONFIGURATION_NAME)
.extendsFrom(configContainer.getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME));
configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.TEST))
.extendsFrom(configContainer.getByName(JavaPlugin.TEST_RUNTIME_CLASSPATH_CONFIGURATION_NAME));
configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.NORMAL))
.extendsFrom(configContainer.getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME));
configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.DEVELOPMENT))
.extendsFrom(
configContainer.getByName(ToolingUtils.DEV_MODE_CONFIGURATION_NAME),
configContainer.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME),
configContainer.getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME));
configContainer.getByName(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME)
.withDependencies(annotationProcessors -> {
Set<ResolvedArtifact> compileClasspathArtifacts = DependencyUtils
.duplicateConfiguration(project, configContainer
.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME))
.getResolvedConfiguration()
.getResolvedArtifacts();
for (ResolvedArtifact artifact : compileClasspathArtifacts) {
if ("quarkus-panache-common".equals(artifact.getName())
&& "io.quarkus".equals(artifact.getModuleVersion().getId().getGroup())) {
project.getDependencies().add(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME,
"io.quarkus:quarkus-panache-common:" + artifact.getModuleVersion().getId().getVersion());
}
}
});
}
private final Project project;
private final LaunchMode mode;
private final String runtimeConfigurationName;
private final String platformConfigurationName;
private final String deploymentConfigurationName;
/**
* The platform configuration updates the PlatformImports, but since the PlatformImports don't
* have a place to be stored in the project, they're stored here. The way that extensions are
* tracked and conditional dependencies needs some attention, which will likely resolve this.
*/
private static final HashMap<String, PlatformImportsImpl> platformImports = new HashMap<>();
/**
* The key used to look up the correct PlatformImports that matches the platformConfigurationName
*/
private final String platformImportName;
public ApplicationDeploymentClasspathBuilder(Project project, LaunchMode mode) {
this.project = project;
this.mode = mode;
this.runtimeConfigurationName = getFinalRuntimeConfigName(mode);
this.platformConfigurationName = ToolingUtils.toPlatformConfigurationName(this.runtimeConfigurationName);
this.deploymentConfigurationName = ToolingUtils.toDeploymentConfigurationName(this.runtimeConfigurationName);
this.platformImportName = project.getPath() + ":" + this.platformConfigurationName;
setUpPlatformConfiguration();
setUpRuntimeConfiguration();
setUpDeploymentConfiguration();
}
private void setUpPlatformConfiguration() {
if (project.getConfigurations().findByName(this.platformConfigurationName) == null) {
PlatformImportsImpl platformImports =
ApplicationDeploymentClasspathBuilder.platformImports.computeIfAbsent(this.platformImportName, (ignored) -> new PlatformImportsImpl());
project.getConfigurations().register(this.platformConfigurationName, configuration -> {
configuration.getDependencies().addAllLater(project.provider(() ->
project.getConfigurations()
.getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME)
.getAllDependencies()
.stream()
.filter(dependency ->
dependency instanceof ModuleDependency &&
ToolingUtils.isEnforcedPlatform((ModuleDependency) dependency))
.collect(Collectors.toList())
));
configuration.getResolutionStrategy().eachDependency(d -> {
ModuleIdentifier identifier = d.getTarget().getModule();
final String group = identifier.getGroup();
final String name = identifier.getName();
if (name.endsWith(BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX)) {
platformImports.addPlatformDescriptor(group, name, d.getTarget().getVersion(), "json",
d.getTarget().getVersion());
} else if (name.endsWith(BootstrapConstants.PLATFORM_PROPERTIES_ARTIFACT_ID_SUFFIX)) {
final DefaultDependencyArtifact dep = new DefaultDependencyArtifact();
dep.setExtension("properties");
dep.setType("properties");
dep.setName(name);
final DefaultExternalModuleDependency gradleDep = new DefaultExternalModuleDependency(
group, name, d.getTarget().getVersion(), null);
gradleDep.addArtifact(dep);
for (ResolvedArtifact a : project.getConfigurations().detachedConfiguration(gradleDep)
.getResolvedConfiguration().getResolvedArtifacts()) {
if (a.getName().equals(name)) {
try {
platformImports.addPlatformProperties(group, name, null, "properties", d.getTarget().getVersion(),
a.getFile().toPath());
} catch (AppModelResolverException e) {
throw new GradleException("Failed to import platform properties " + a.getFile(), e);
}
break;
}
}
}
});
});
}
}
private void setUpRuntimeConfiguration() {
if (project.getConfigurations().findByName(this.runtimeConfigurationName) == null) {
project.getConfigurations().register(this.runtimeConfigurationName, configuration ->
configuration.extendsFrom(
project.getConfigurations()
.getByName(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(mode))));
}
}
public Configuration getPlatformConfiguration() {
return project.getConfigurations().getByName(this.platformConfigurationName);
}
private Configuration getRawRuntimeConfiguration() {
return project.getConfigurations().getByName(this.runtimeConfigurationName);
}
/**
* Forces deployment configuration to resolve to discover conditional dependencies.
*/
public Configuration getRuntimeConfiguration() {
this.getDeploymentConfiguration().resolve();
return project.getConfigurations().getByName(this.runtimeConfigurationName);
}
public Configuration getDeploymentConfiguration() {
return project.getConfigurations().getByName(this.deploymentConfigurationName);
}
/**
* Forces the platform configuration to resolve and then uses that to populate platform imports.
*/
public PlatformImports getPlatformImports() {
this.getPlatformConfiguration().getResolvedConfiguration();
return platformImports.get(this.platformImportName);
}
private Set<ExtensionDependency> collectFirstMetQuarkusExtensions(Configuration configuration,
Collection<ExtensionDependency> knownExtensions) {
Set<ExtensionDependency> firstLevelExtensions = new HashSet<>();
Set<ResolvedDependency> firstLevelModuleDependencies = configuration.getResolvedConfiguration()
.getFirstLevelModuleDependencies();
Set<String> visitedArtifacts = new HashSet<>();
for (ResolvedDependency firstLevelModuleDependency : firstLevelModuleDependencies) {
firstLevelExtensions
.addAll(collectQuarkusExtensions(firstLevelModuleDependency, visitedArtifacts, knownExtensions));
}
return firstLevelExtensions;
}
private Set<ExtensionDependency> collectQuarkusExtensions(ResolvedDependency dependency, Set<String> visitedArtifacts,
Collection<ExtensionDependency> knownExtensions) {
String artifactKey = String.format("%s:%s", dependency.getModuleGroup(), dependency.getModuleName());
if (!visitedArtifacts.add(artifactKey)) {
return Collections.emptySet();
}
Set<ExtensionDependency> extensions = new LinkedHashSet<>();
ExtensionDependency extension = getExtensionOrNull(dependency.getModuleGroup(), dependency.getModuleName(),
dependency.getModuleVersion(), knownExtensions);
if (extension != null) {
extensions.add(extension);
} else {
for (ResolvedDependency child : dependency.getChildren()) {
extensions.addAll(collectQuarkusExtensions(child, visitedArtifacts, knownExtensions));
}
}
return extensions;
}
private ExtensionDependency getExtensionOrNull(String group, String artifact, String version,
Collection<ExtensionDependency> knownExtensions) {
for (ExtensionDependency knownExtension : knownExtensions) {
if (group.equals(knownExtension.getGroup()) && artifact.equals(knownExtension.getName())
&& version.equals(knownExtension.getVersion())) {
return knownExtension;
}
}
return null;
}
} | class ApplicationDeploymentClasspathBuilder {
private static String getRuntimeConfigName(LaunchMode mode, boolean base) {
final StringBuilder sb = new StringBuilder();
sb.append("quarkus");
if (mode == LaunchMode.DEVELOPMENT) {
sb.append("Dev");
} else if (mode == LaunchMode.TEST) {
sb.append("Test");
} else {
sb.append("Prod");
}
if (base) {
sb.append("Base");
}
sb.append("RuntimeClasspathConfiguration");
return sb.toString();
}
public static String getBaseRuntimeConfigName(LaunchMode mode) {
return getRuntimeConfigName(mode, true);
}
public static String getFinalRuntimeConfigName(LaunchMode mode) {
return getRuntimeConfigName(mode, false);
}
public static void initConfigurations(Project project) {
final ConfigurationContainer configContainer = project.getConfigurations();
configContainer.create(ToolingUtils.DEV_MODE_CONFIGURATION_NAME)
.extendsFrom(configContainer.getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME));
configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.TEST))
.extendsFrom(configContainer.getByName(JavaPlugin.TEST_RUNTIME_CLASSPATH_CONFIGURATION_NAME));
configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.NORMAL))
.extendsFrom(configContainer.getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME));
configContainer.create(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(LaunchMode.DEVELOPMENT))
.extendsFrom(
configContainer.getByName(ToolingUtils.DEV_MODE_CONFIGURATION_NAME),
configContainer.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME),
configContainer.getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME));
configContainer.getByName(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME)
.withDependencies(annotationProcessors -> {
Set<ResolvedArtifact> compileClasspathArtifacts = DependencyUtils
.duplicateConfiguration(project, configContainer
.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME))
.getResolvedConfiguration()
.getResolvedArtifacts();
for (ResolvedArtifact artifact : compileClasspathArtifacts) {
if ("quarkus-panache-common".equals(artifact.getName())
&& "io.quarkus".equals(artifact.getModuleVersion().getId().getGroup())) {
project.getDependencies().add(JavaPlugin.ANNOTATION_PROCESSOR_CONFIGURATION_NAME,
"io.quarkus:quarkus-panache-common:" + artifact.getModuleVersion().getId().getVersion());
}
}
});
}
private final Project project;
private final LaunchMode mode;
private final String runtimeConfigurationName;
private final String platformConfigurationName;
private final String deploymentConfigurationName;
/**
* The platform configuration updates the PlatformImports, but since the PlatformImports don't
     * have a place to be stored in the project, they're stored here. The way extensions and
     * conditional dependencies are tracked needs some attention, and addressing that will likely remove the need for this.
*/
private static final HashMap<String, PlatformImportsImpl> platformImports = new HashMap<>();
/**
* The key used to look up the correct PlatformImports that matches the platformConfigurationName
*/
private final String platformImportName;
public ApplicationDeploymentClasspathBuilder(Project project, LaunchMode mode) {
this.project = project;
this.mode = mode;
this.runtimeConfigurationName = getFinalRuntimeConfigName(mode);
this.platformConfigurationName = ToolingUtils.toPlatformConfigurationName(this.runtimeConfigurationName);
this.deploymentConfigurationName = ToolingUtils.toDeploymentConfigurationName(this.runtimeConfigurationName);
this.platformImportName = project.getPath() + ":" + this.platformConfigurationName;
setUpPlatformConfiguration();
setUpRuntimeConfiguration();
setUpDeploymentConfiguration();
}
private void setUpPlatformConfiguration() {
if (project.getConfigurations().findByName(this.platformConfigurationName) == null) {
PlatformImportsImpl platformImports = ApplicationDeploymentClasspathBuilder.platformImports
.computeIfAbsent(this.platformImportName, (ignored) -> new PlatformImportsImpl());
project.getConfigurations().create(this.platformConfigurationName, configuration -> {
configuration.getDependencies().addAllLater(project.provider(() -> project.getConfigurations()
.getByName(JavaPlugin.IMPLEMENTATION_CONFIGURATION_NAME)
.getAllDependencies()
.stream()
.filter(dependency -> dependency instanceof ModuleDependency &&
ToolingUtils.isEnforcedPlatform((ModuleDependency) dependency))
.collect(Collectors.toList())));
configuration.getResolutionStrategy().eachDependency(d -> {
ModuleIdentifier identifier = d.getTarget().getModule();
final String group = identifier.getGroup();
final String name = identifier.getName();
if (name.endsWith(BootstrapConstants.PLATFORM_DESCRIPTOR_ARTIFACT_ID_SUFFIX)) {
platformImports.addPlatformDescriptor(group, name, d.getTarget().getVersion(), "json",
d.getTarget().getVersion());
} else if (name.endsWith(BootstrapConstants.PLATFORM_PROPERTIES_ARTIFACT_ID_SUFFIX)) {
final DefaultDependencyArtifact dep = new DefaultDependencyArtifact();
dep.setExtension("properties");
dep.setType("properties");
dep.setName(name);
final DefaultExternalModuleDependency gradleDep = new DefaultExternalModuleDependency(
group, name, d.getTarget().getVersion(), null);
gradleDep.addArtifact(dep);
for (ResolvedArtifact a : project.getConfigurations().detachedConfiguration(gradleDep)
.getResolvedConfiguration().getResolvedArtifacts()) {
if (a.getName().equals(name)) {
try {
platformImports.addPlatformProperties(group, name, null, "properties",
d.getTarget().getVersion(),
a.getFile().toPath());
} catch (AppModelResolverException e) {
throw new GradleException("Failed to import platform properties " + a.getFile(), e);
}
break;
}
}
}
});
});
}
}
private void setUpRuntimeConfiguration() {
if (project.getConfigurations().findByName(this.runtimeConfigurationName) == null) {
project.getConfigurations().create(this.runtimeConfigurationName, configuration -> configuration.extendsFrom(
project.getConfigurations()
.getByName(ApplicationDeploymentClasspathBuilder.getBaseRuntimeConfigName(mode))));
}
}
public Configuration getPlatformConfiguration() {
return project.getConfigurations().getByName(this.platformConfigurationName);
}
private Configuration getRawRuntimeConfiguration() {
return project.getConfigurations().getByName(this.runtimeConfigurationName);
}
/**
* Forces deployment configuration to resolve to discover conditional dependencies.
*/
public Configuration getRuntimeConfiguration() {
this.getDeploymentConfiguration().resolve();
return project.getConfigurations().getByName(this.runtimeConfigurationName);
}
public Configuration getDeploymentConfiguration() {
return project.getConfigurations().getByName(this.deploymentConfigurationName);
}
/**
* Forces the platform configuration to resolve and then uses that to populate platform imports.
*/
public PlatformImports getPlatformImports() {
this.getPlatformConfiguration().getResolvedConfiguration();
return platformImports.get(this.platformImportName);
}
private Set<ExtensionDependency> collectFirstMetQuarkusExtensions(Configuration configuration,
Collection<ExtensionDependency> knownExtensions) {
Set<ExtensionDependency> firstLevelExtensions = new HashSet<>();
Set<ResolvedDependency> firstLevelModuleDependencies = configuration.getResolvedConfiguration()
.getFirstLevelModuleDependencies();
Set<String> visitedArtifacts = new HashSet<>();
for (ResolvedDependency firstLevelModuleDependency : firstLevelModuleDependencies) {
firstLevelExtensions
.addAll(collectQuarkusExtensions(firstLevelModuleDependency, visitedArtifacts, knownExtensions));
}
return firstLevelExtensions;
}
private Set<ExtensionDependency> collectQuarkusExtensions(ResolvedDependency dependency, Set<String> visitedArtifacts,
Collection<ExtensionDependency> knownExtensions) {
String artifactKey = String.format("%s:%s", dependency.getModuleGroup(), dependency.getModuleName());
if (!visitedArtifacts.add(artifactKey)) {
return Collections.emptySet();
}
Set<ExtensionDependency> extensions = new LinkedHashSet<>();
ExtensionDependency extension = getExtensionOrNull(dependency.getModuleGroup(), dependency.getModuleName(),
dependency.getModuleVersion(), knownExtensions);
if (extension != null) {
extensions.add(extension);
} else {
for (ResolvedDependency child : dependency.getChildren()) {
extensions.addAll(collectQuarkusExtensions(child, visitedArtifacts, knownExtensions));
}
}
return extensions;
}
private ExtensionDependency getExtensionOrNull(String group, String artifact, String version,
Collection<ExtensionDependency> knownExtensions) {
for (ExtensionDependency knownExtension : knownExtensions) {
if (group.equals(knownExtension.getGroup()) && artifact.equals(knownExtension.getName())
&& version.equals(knownExtension.getVersion())) {
return knownExtension;
}
}
return null;
}
} |
Could there be a scenario where a user does not want to retry on UNAVAILABLE? This code would always add UNAVAILABLE even if the user specified their own set of retryable codes, and could therefore lead to unexpected behaviour.. | private static SpannerAccessor createAndConnect(SpannerConfig spannerConfig) {
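        // Illustrative sketch only (not part of the original change): the concern above could be addressed by
        // treating UNAVAILABLE as a default that applies only when the user has not configured retryable codes.
        // Assumes the same types as the surrounding method (StatusCode.Code, SpannerConfig); variable name is hypothetical.
        Set<Code> effectiveRetryCodes = new HashSet<>();
        if (spannerConfig.getRetryableCodes() != null && !spannerConfig.getRetryableCodes().isEmpty()) {
          effectiveRetryCodes.addAll(spannerConfig.getRetryableCodes()); // honor the user's explicit choice
        } else {
          effectiveRetryCodes.add(Code.UNAVAILABLE); // fall back to a default only when nothing was configured
        }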
SpannerOptions.Builder builder = SpannerOptions.newBuilder();
Set<Code> retryableCodes = new HashSet<>();
retryableCodes.add(Code.UNAVAILABLE);
if (spannerConfig.getRetryableCodes() != null) {
retryableCodes.addAll(spannerConfig.getRetryableCodes());
}
if (spannerConfig.getDataBoostEnabled() != null && spannerConfig.getDataBoostEnabled().get()) {
retryableCodes.add(Code.RESOURCE_EXHAUSTED);
}
builder
.getSpannerStubSettingsBuilder()
.applyToAllUnaryMethods(
input -> {
input.setRetryableCodes(retryableCodes);
return null;
});
builder
.getSpannerStubSettingsBuilder()
.executeStreamingSqlSettings()
.setRetryableCodes(retryableCodes);
UnaryCallSettings.Builder<CommitRequest, CommitResponse> commitSettings =
builder.getSpannerStubSettingsBuilder().commitSettings();
ValueProvider<Duration> commitDeadline = spannerConfig.getCommitDeadline();
if (spannerConfig.getCommitRetrySettings() != null) {
commitSettings.setRetrySettings(spannerConfig.getCommitRetrySettings());
} else if (commitDeadline != null && commitDeadline.get().getMillis() > 0) {
RetrySettings.Builder commitRetrySettingsBuilder =
commitSettings.getRetrySettings().toBuilder();
commitSettings.setRetrySettings(
commitRetrySettingsBuilder
.setTotalTimeout(org.threeten.bp.Duration.ofMillis(commitDeadline.get().getMillis()))
.setMaxRpcTimeout(org.threeten.bp.Duration.ofMillis(commitDeadline.get().getMillis()))
.setInitialRpcTimeout(
org.threeten.bp.Duration.ofMillis(commitDeadline.get().getMillis()))
.build());
}
ServerStreamingCallSettings.Builder<ExecuteSqlRequest, PartialResultSet>
executeStreamingSqlSettings =
builder.getSpannerStubSettingsBuilder().executeStreamingSqlSettings();
if (spannerConfig.getExecuteStreamingSqlRetrySettings() != null) {
executeStreamingSqlSettings.setRetrySettings(
spannerConfig.getExecuteStreamingSqlRetrySettings());
} else {
RetrySettings.Builder executeSqlStreamingRetrySettings =
executeStreamingSqlSettings.getRetrySettings().toBuilder();
executeStreamingSqlSettings.setRetrySettings(
executeSqlStreamingRetrySettings
.setInitialRpcTimeout(org.threeten.bp.Duration.ofMinutes(120))
.setMaxRpcTimeout(org.threeten.bp.Duration.ofMinutes(120))
.setTotalTimeout(org.threeten.bp.Duration.ofMinutes(120))
.build());
}
SpannerStubSettings.Builder spannerStubSettingsBuilder =
builder.getSpannerStubSettingsBuilder();
ValueProvider<Duration> partitionQueryTimeout = spannerConfig.getPartitionQueryTimeout();
if (partitionQueryTimeout != null
&& partitionQueryTimeout.get() != null
&& partitionQueryTimeout.get().getMillis() > 0) {
spannerStubSettingsBuilder
.partitionQuerySettings()
.setSimpleTimeoutNoRetries(
org.threeten.bp.Duration.ofMillis(partitionQueryTimeout.get().getMillis()));
}
ValueProvider<Duration> partitionReadTimeout = spannerConfig.getPartitionReadTimeout();
if (partitionReadTimeout != null
&& partitionReadTimeout.get() != null
&& partitionReadTimeout.get().getMillis() > 0) {
spannerStubSettingsBuilder
.partitionReadSettings()
.setSimpleTimeoutNoRetries(
org.threeten.bp.Duration.ofMillis(partitionReadTimeout.get().getMillis()));
}
ValueProvider<String> projectId = spannerConfig.getProjectId();
if (projectId != null) {
builder.setProjectId(projectId.get());
}
ServiceFactory<Spanner, SpannerOptions> serviceFactory = spannerConfig.getServiceFactory();
if (serviceFactory != null) {
builder.setServiceFactory(serviceFactory);
}
ValueProvider<String> host = spannerConfig.getHost();
if (host != null) {
builder.setHost(host.get());
}
ValueProvider<String> emulatorHost = spannerConfig.getEmulatorHost();
if (emulatorHost != null) {
builder.setEmulatorHost(emulatorHost.get());
if (spannerConfig.getIsLocalChannelProvider() != null
&& spannerConfig.getIsLocalChannelProvider().get()) {
builder.setChannelProvider(LocalChannelProvider.create(emulatorHost.get()));
}
builder.setCredentials(NoCredentials.getInstance());
}
String userAgentString = USER_AGENT_PREFIX + "/" + ReleaseInfo.getReleaseInfo().getVersion();
builder.setHeaderProvider(FixedHeaderProvider.create("user-agent", userAgentString));
ValueProvider<String> databaseRole = spannerConfig.getDatabaseRole();
if (databaseRole != null && databaseRole.get() != null && !databaseRole.get().isEmpty()) {
builder.setDatabaseRole(databaseRole.get());
}
SpannerOptions options = builder.build();
Spanner spanner = options.getService();
String instanceId = spannerConfig.getInstanceId().get();
String databaseId = spannerConfig.getDatabaseId().get();
DatabaseClient databaseClient =
spanner.getDatabaseClient(DatabaseId.of(options.getProjectId(), instanceId, databaseId));
BatchClient batchClient =
spanner.getBatchClient(DatabaseId.of(options.getProjectId(), instanceId, databaseId));
DatabaseAdminClient databaseAdminClient = spanner.getDatabaseAdminClient();
return new SpannerAccessor(
spanner, databaseClient, databaseAdminClient, batchClient, spannerConfig);
} | retryableCodes.add(Code.UNAVAILABLE); | private static SpannerAccessor createAndConnect(SpannerConfig spannerConfig) {
SpannerOptions.Builder builder = SpannerOptions.newBuilder();
Set<Code> retryableCodes = new HashSet<>();
if (spannerConfig.getRetryableCodes() != null) {
retryableCodes.addAll(spannerConfig.getRetryableCodes());
}
if (spannerConfig.getDataBoostEnabled() != null && spannerConfig.getDataBoostEnabled().get()) {
retryableCodes.add(Code.RESOURCE_EXHAUSTED);
}
Set<Code> unaryMethodRetryableCodes = new HashSet<>(retryableCodes);
unaryMethodRetryableCodes.addAll(
builder.getSpannerStubSettingsBuilder().getSessionSettings().getRetryableCodes());
builder
.getSpannerStubSettingsBuilder()
.applyToAllUnaryMethods(
input -> {
input.setRetryableCodes(unaryMethodRetryableCodes);
return null;
});
Set<Code> streamingMethodRetryableCodes = new HashSet<>(retryableCodes);
streamingMethodRetryableCodes.addAll(
builder.getSpannerStubSettingsBuilder().executeStreamingSqlSettings().getRetryableCodes());
builder
.getSpannerStubSettingsBuilder()
.executeStreamingSqlSettings()
.setRetryableCodes(streamingMethodRetryableCodes);
builder
.getSpannerStubSettingsBuilder()
.streamingReadSettings()
.setRetryableCodes(streamingMethodRetryableCodes);
UnaryCallSettings.Builder<CommitRequest, CommitResponse> commitSettings =
builder.getSpannerStubSettingsBuilder().commitSettings();
ValueProvider<Duration> commitDeadline = spannerConfig.getCommitDeadline();
if (spannerConfig.getCommitRetrySettings() != null) {
commitSettings.setRetrySettings(spannerConfig.getCommitRetrySettings());
} else if (commitDeadline != null && commitDeadline.get().getMillis() > 0) {
RetrySettings.Builder commitRetrySettingsBuilder =
commitSettings.getRetrySettings().toBuilder();
commitSettings.setRetrySettings(
commitRetrySettingsBuilder
.setTotalTimeout(org.threeten.bp.Duration.ofMillis(commitDeadline.get().getMillis()))
.setMaxRpcTimeout(org.threeten.bp.Duration.ofMillis(commitDeadline.get().getMillis()))
.setInitialRpcTimeout(
org.threeten.bp.Duration.ofMillis(commitDeadline.get().getMillis()))
.build());
}
ServerStreamingCallSettings.Builder<ExecuteSqlRequest, PartialResultSet>
executeStreamingSqlSettings =
builder.getSpannerStubSettingsBuilder().executeStreamingSqlSettings();
if (spannerConfig.getExecuteStreamingSqlRetrySettings() != null) {
executeStreamingSqlSettings.setRetrySettings(
spannerConfig.getExecuteStreamingSqlRetrySettings());
} else {
RetrySettings.Builder executeSqlStreamingRetrySettings =
executeStreamingSqlSettings.getRetrySettings().toBuilder();
executeStreamingSqlSettings.setRetrySettings(
executeSqlStreamingRetrySettings
.setInitialRpcTimeout(org.threeten.bp.Duration.ofMinutes(120))
.setMaxRpcTimeout(org.threeten.bp.Duration.ofMinutes(120))
.setTotalTimeout(org.threeten.bp.Duration.ofMinutes(120))
.build());
}
SpannerStubSettings.Builder spannerStubSettingsBuilder =
builder.getSpannerStubSettingsBuilder();
ValueProvider<Duration> partitionQueryTimeout = spannerConfig.getPartitionQueryTimeout();
if (partitionQueryTimeout != null
&& partitionQueryTimeout.get() != null
&& partitionQueryTimeout.get().getMillis() > 0) {
spannerStubSettingsBuilder
.partitionQuerySettings()
.setSimpleTimeoutNoRetries(
org.threeten.bp.Duration.ofMillis(partitionQueryTimeout.get().getMillis()));
}
ValueProvider<Duration> partitionReadTimeout = spannerConfig.getPartitionReadTimeout();
if (partitionReadTimeout != null
&& partitionReadTimeout.get() != null
&& partitionReadTimeout.get().getMillis() > 0) {
spannerStubSettingsBuilder
.partitionReadSettings()
.setSimpleTimeoutNoRetries(
org.threeten.bp.Duration.ofMillis(partitionReadTimeout.get().getMillis()));
}
ValueProvider<String> projectId = spannerConfig.getProjectId();
if (projectId != null) {
builder.setProjectId(projectId.get());
}
ServiceFactory<Spanner, SpannerOptions> serviceFactory = spannerConfig.getServiceFactory();
if (serviceFactory != null) {
builder.setServiceFactory(serviceFactory);
}
ValueProvider<String> host = spannerConfig.getHost();
if (host != null) {
builder.setHost(host.get());
}
ValueProvider<String> emulatorHost = spannerConfig.getEmulatorHost();
if (emulatorHost != null) {
builder.setEmulatorHost(emulatorHost.get());
if (spannerConfig.getIsLocalChannelProvider() != null
&& spannerConfig.getIsLocalChannelProvider().get()) {
builder.setChannelProvider(LocalChannelProvider.create(emulatorHost.get()));
}
builder.setCredentials(NoCredentials.getInstance());
}
String userAgentString = USER_AGENT_PREFIX + "/" + ReleaseInfo.getReleaseInfo().getVersion();
builder.setHeaderProvider(FixedHeaderProvider.create("user-agent", userAgentString));
ValueProvider<String> databaseRole = spannerConfig.getDatabaseRole();
if (databaseRole != null && databaseRole.get() != null && !databaseRole.get().isEmpty()) {
builder.setDatabaseRole(databaseRole.get());
}
SpannerOptions options = builder.build();
Spanner spanner = options.getService();
String instanceId = spannerConfig.getInstanceId().get();
String databaseId = spannerConfig.getDatabaseId().get();
DatabaseClient databaseClient =
spanner.getDatabaseClient(DatabaseId.of(options.getProjectId(), instanceId, databaseId));
BatchClient batchClient =
spanner.getBatchClient(DatabaseId.of(options.getProjectId(), instanceId, databaseId));
DatabaseAdminClient databaseAdminClient = spanner.getDatabaseAdminClient();
return new SpannerAccessor(
spanner, databaseClient, databaseAdminClient, batchClient, spannerConfig);
} | class SpannerAccessor implements AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(SpannerAccessor.class);
  /* A common user agent token that indicates that this request originated from
* Apache Beam. Setting the user-agent allows Cloud Spanner to detect that the
* workload is coming from Dataflow and to potentially apply performance optimizations
*/
private static final String USER_AGENT_PREFIX = "Apache_Beam_Java";
private static final ConcurrentHashMap<SpannerConfig, SpannerAccessor> spannerAccessors =
new ConcurrentHashMap<>();
private static final ConcurrentHashMap<SpannerConfig, AtomicInteger> refcounts =
new ConcurrentHashMap<>();
private final Spanner spanner;
private final DatabaseClient databaseClient;
private final BatchClient batchClient;
private final DatabaseAdminClient databaseAdminClient;
private final SpannerConfig spannerConfig;
private SpannerAccessor(
Spanner spanner,
DatabaseClient databaseClient,
DatabaseAdminClient databaseAdminClient,
BatchClient batchClient,
SpannerConfig spannerConfig) {
this.spanner = spanner;
this.databaseClient = databaseClient;
this.databaseAdminClient = databaseAdminClient;
this.batchClient = batchClient;
this.spannerConfig = spannerConfig;
}
public static SpannerAccessor getOrCreate(SpannerConfig spannerConfig) {
SpannerAccessor self = spannerAccessors.get(spannerConfig);
if (self == null) {
synchronized (spannerAccessors) {
self = spannerAccessors.get(spannerConfig);
if (self == null) {
LOG.info("Connecting to {}", spannerConfig);
self = SpannerAccessor.createAndConnect(spannerConfig);
spannerAccessors.put(spannerConfig, self);
refcounts.putIfAbsent(spannerConfig, new AtomicInteger(0));
}
}
}
int refcount = refcounts.get(spannerConfig).incrementAndGet();
LOG.debug("getOrCreate(): refcount={} for {}", refcount, spannerConfig);
return self;
}
public DatabaseClient getDatabaseClient() {
return databaseClient;
}
public BatchClient getBatchClient() {
return batchClient;
}
public DatabaseAdminClient getDatabaseAdminClient() {
return databaseAdminClient;
}
@Override
public void close() {
int refcount = refcounts.getOrDefault(spannerConfig, new AtomicInteger(0)).decrementAndGet();
LOG.debug("close(): refcount={} for {}", refcount, spannerConfig);
if (refcount == 0) {
synchronized (spannerAccessors) {
if (refcounts.get(spannerConfig).get() <= 0) {
spannerAccessors.remove(spannerConfig);
refcounts.remove(spannerConfig);
LOG.info("Closing {} ", spannerConfig);
spanner.close();
}
}
}
}
} | class SpannerAccessor implements AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(SpannerAccessor.class);
  /* A common user agent token that indicates that this request originated from
* Apache Beam. Setting the user-agent allows Cloud Spanner to detect that the
* workload is coming from Dataflow and to potentially apply performance optimizations
*/
private static final String USER_AGENT_PREFIX = "Apache_Beam_Java";
private static final ConcurrentHashMap<SpannerConfig, SpannerAccessor> spannerAccessors =
new ConcurrentHashMap<>();
private static final ConcurrentHashMap<SpannerConfig, AtomicInteger> refcounts =
new ConcurrentHashMap<>();
private final Spanner spanner;
private final DatabaseClient databaseClient;
private final BatchClient batchClient;
private final DatabaseAdminClient databaseAdminClient;
private final SpannerConfig spannerConfig;
private SpannerAccessor(
Spanner spanner,
DatabaseClient databaseClient,
DatabaseAdminClient databaseAdminClient,
BatchClient batchClient,
SpannerConfig spannerConfig) {
this.spanner = spanner;
this.databaseClient = databaseClient;
this.databaseAdminClient = databaseAdminClient;
this.batchClient = batchClient;
this.spannerConfig = spannerConfig;
}
public static SpannerAccessor getOrCreate(SpannerConfig spannerConfig) {
SpannerAccessor self = spannerAccessors.get(spannerConfig);
if (self == null) {
synchronized (spannerAccessors) {
self = spannerAccessors.get(spannerConfig);
if (self == null) {
LOG.info("Connecting to {}", spannerConfig);
self = SpannerAccessor.createAndConnect(spannerConfig);
spannerAccessors.put(spannerConfig, self);
refcounts.putIfAbsent(spannerConfig, new AtomicInteger(0));
}
}
}
int refcount = refcounts.get(spannerConfig).incrementAndGet();
LOG.debug("getOrCreate(): refcount={} for {}", refcount, spannerConfig);
return self;
}
public DatabaseClient getDatabaseClient() {
return databaseClient;
}
public BatchClient getBatchClient() {
return batchClient;
}
public DatabaseAdminClient getDatabaseAdminClient() {
return databaseAdminClient;
}
@Override
public void close() {
int refcount = refcounts.getOrDefault(spannerConfig, new AtomicInteger(0)).decrementAndGet();
LOG.debug("close(): refcount={} for {}", refcount, spannerConfig);
if (refcount == 0) {
synchronized (spannerAccessors) {
if (refcounts.get(spannerConfig).get() <= 0) {
spannerAccessors.remove(spannerConfig);
refcounts.remove(spannerConfig);
LOG.info("Closing {} ", spannerConfig);
spanner.close();
}
}
}
}
} |
Speaking of exceptions (not specific to this line, but in general): for hybrid, are you swallowing (or even logging) cryptographic exceptions from the local crypto client and calling remote? | private void unpackAndValidateId(String keyId) {
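        // Illustrative sketch only (hypothetical, not the SDK's actual code): for the hybrid path, a local
        // cryptographic failure could be logged before falling back to the service call instead of being
        // silently swallowed. Assumes the surrounding client fields (logger, localKeyCryptographyClient,
        // cryptographyServiceClient, key) and the same Reactor types used elsewhere in this class, e.g.:
        //     return localKeyCryptographyClient.encryptAsync(encryptOptions, context, key)
        //         .onErrorResume(e -> {
        //             logger.warning("Local encryption failed; falling back to the Key Vault service: " + e.getMessage());
        //             return cryptographyServiceClient.encrypt(encryptOptions, context);
        //         });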
if (CoreUtils.isNullOrEmpty(keyId)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key Id is invalid"));
}
try {
URL url = new URL(keyId);
String[] tokens = url.getPath().split("/");
            String endpoint = url.getProtocol() + "://" + url.getHost();
String keyName = (tokens.length >= 3 ? tokens[2] : null);
String version = (tokens.length >= 4 ? tokens[3] : null);
this.keyCollection = (tokens.length >= 2 ? tokens[1] : null);
if (Strings.isNullOrEmpty(endpoint)) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Key endpoint in key identifier is invalid."));
} else if (Strings.isNullOrEmpty(keyName)) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Key name in key identifier is invalid."));
} else if (Strings.isNullOrEmpty(version)) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Key version in key identifier is invalid."));
}
} catch (MalformedURLException e) {
throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed.", e));
}
} | throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed.", e)); | private void unpackAndValidateId(String keyId) {
if (CoreUtils.isNullOrEmpty(keyId)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key Id is invalid"));
}
try {
URL url = new URL(keyId);
String[] tokens = url.getPath().split("/");
            String endpoint = url.getProtocol() + "://" + url.getHost();
String keyName = (tokens.length >= 3 ? tokens[2] : null);
String version = (tokens.length >= 4 ? tokens[3] : null);
this.keyCollection = (tokens.length >= 2 ? tokens[1] : null);
if (Strings.isNullOrEmpty(endpoint)) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Key endpoint in key identifier is invalid."));
} else if (Strings.isNullOrEmpty(keyName)) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Key name in key identifier is invalid."));
} else if (Strings.isNullOrEmpty(version)) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Key version in key identifier is invalid."));
}
} catch (MalformedURLException e) {
throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed.", e));
}
} | class CryptographyAsyncClient {
static final String KEY_VAULT_SCOPE = "https:
static final String SECRETS_COLLECTION = "secrets";
static final String KEYVAULT_TRACING_NAMESPACE_VALUE = "Microsoft.KeyVault";
JsonWebKey key;
private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class);
private final CryptographyService service;
private final String keyId;
private CryptographyServiceClient cryptographyServiceClient;
private LocalKeyCryptographyClient localKeyCryptographyClient;
private String keyCollection;
/**
* Creates a {@link CryptographyAsyncClient} that uses a given {@link HttpPipeline pipeline} to service requests.
*
* @param keyId The Azure Key Vault key identifier to use for cryptography operations.
* @param pipeline {@link HttpPipeline} that the HTTP requests and responses flow through.
* @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
*/
CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) {
unpackAndValidateId(keyId);
this.keyId = keyId;
this.service = RestProxy.create(CryptographyService.class, pipeline);
this.cryptographyServiceClient = new CryptographyServiceClient(keyId, service, version);
this.key = null;
}
/**
* Creates a {@link CryptographyAsyncClient} that uses a {@link JsonWebKey} to perform local cryptography
* operations.
*
* @param jsonWebKey The {@link JsonWebKey} to use for local cryptography operations.
*/
CryptographyAsyncClient(JsonWebKey jsonWebKey) {
Objects.requireNonNull(jsonWebKey, "The JSON Web Key is required.");
if (!jsonWebKey.isValid()) {
throw new IllegalArgumentException("The JSON Web Key is not valid.");
}
if (jsonWebKey.getKeyOps() == null) {
throw new IllegalArgumentException("The JSON Web Key's key operations property is not configured.");
}
if (jsonWebKey.getKeyType() == null) {
throw new IllegalArgumentException("The JSON Web Key's key type property is not configured.");
}
this.key = jsonWebKey;
this.keyId = null;
this.service = null;
this.cryptographyServiceClient = null;
initializeCryptoClients();
}
private void initializeCryptoClients() {
if (localKeyCryptographyClient != null) {
return;
}
if (key.getKeyType().equals(RSA) || key.getKeyType().equals(RSA_HSM)) {
this.localKeyCryptographyClient = new RsaKeyCryptographyClient(this.key, this.cryptographyServiceClient);
} else if (key.getKeyType().equals(EC) || key.getKeyType().equals(EC_HSM)) {
this.localKeyCryptographyClient = new EcKeyCryptographyClient(this.key, this.cryptographyServiceClient);
} else if (key.getKeyType().equals(OCT) || key.getKeyType().equals(OCT_HSM)) {
this.localKeyCryptographyClient =
new SymmetricKeyCryptographyClient(this.key, this.cryptographyServiceClient);
} else {
throw logger.logExceptionAsError(new IllegalArgumentException(String.format(
"The JSON Web Key type: %s is not supported.", this.key.getKeyType().toString())));
}
}
Mono<String> getKeyId() {
return Mono.defer(() -> Mono.just(this.keyId));
}
/**
* Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
* the {@code keys/get} permission for non-local operations.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.getKey}
*
* @return A {@link Mono} containing the requested {@link KeyVaultKey key}.
*
* @throws ResourceNotFoundException When the configured key doesn't exist in the key vault.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<KeyVaultKey> getKey() {
try {
return getKeyWithResponse().flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
* the {@code keys/get} permission for non-local operations.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.getKeyWithResponse}
*
* @return A {@link Mono} containing a {@link Response} whose {@link Response
* requested {@link KeyVaultKey key}.
*
* @throws ResourceNotFoundException When the configured key doesn't exist in the key vault.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<KeyVaultKey>> getKeyWithResponse() {
try {
return withContext(this::getKeyWithResponse);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) {
if (cryptographyServiceClient != null) {
return cryptographyServiceClient.getKey(context);
} else {
throw logger.logExceptionAsError(new UnsupportedOperationException(
"Operation not supported when an Azure Key Vault key identifier was not provided when creating this "
+ "client"));
}
}
Mono<JsonWebKey> getSecretKey() {
try {
return withContext(context -> cryptographyServiceClient.getSecretKey(context))
.flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports
* a single block of data, the size of which is dependent on the target key and the encryption algorithm to be used.
* The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys, the
* public portion of the key is used for encryption. This operation requires the {@code keys/encrypt} permission
* for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting
* the specified {@code plainText}. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when
* a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt
*
* @param algorithm The algorithm to be used for encryption.
* @param plainText The content to be encrypted.
*
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult
* contains the encrypted content.
*
* @throws NullPointerException If {@code algorithm} or {@code plainText} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for encryption.
* @throws UnsupportedOperationException If the encrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plainText) {
return encrypt(new EncryptOptions(algorithm, plainText, null, null), null);
}
/**
* Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports
* a single block of data, the size of which is dependent on the target key and the encryption algorithm to be used.
* The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys, the
* public portion of the key is used for encryption. This operation requires the {@code keys/encrypt} permission
* for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting
* the specified {@code plainText}. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when
* a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt
*
* @param encryptOptions The parameters to use in the encryption operation.
*
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult
* contains the encrypted content.
*
* @throws NullPointerException If {@code algorithm} or {@code plainText} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for encryption.
* @throws UnsupportedOperationException If the encrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<EncryptResult> encrypt(EncryptOptions encryptOptions) {
Objects.requireNonNull(encryptOptions, "'encryptOptions' cannot be null.");
try {
return withContext(context -> encrypt(encryptOptions, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<EncryptResult> encrypt(EncryptOptions encryptOptions, Context context) {
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.encrypt(encryptOptions, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.ENCRYPT)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Encrypt operation is missing permission/not supported for key with id: %s", key.getId()))));
}
return localKeyCryptographyClient.encryptAsync(encryptOptions, context, key);
});
}
/**
* Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a
* single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm
* to be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires
* the {@code keys/decrypt} permission for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting
* the specified encrypted content. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt
*
* @param algorithm The algorithm to be used for decryption.
* @param cipherText The content to be decrypted.
*
* @return A {@link Mono} containing the decrypted blob.
*
* @throws NullPointerException If {@code algorithm} or {@code cipherText} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for decryption.
* @throws UnsupportedOperationException If the decrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) {
return decrypt(new DecryptOptions(algorithm, cipherText, null, null, null));
}
/**
* Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a
* single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm
* to be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires
* the {@code keys/decrypt} permission for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting
* the specified encrypted content. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt
*
* @param decryptOptions The parameters to use in the decryption operation.
*
* @return A {@link Mono} containing the decrypted blob.
*
* @throws NullPointerException If {@code algorithm} or {@code cipherText} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for decryption.
* @throws UnsupportedOperationException If the decrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DecryptResult> decrypt(DecryptOptions decryptOptions) {
Objects.requireNonNull(decryptOptions, "'decryptOptions' cannot be null.");
try {
return withContext(context -> decrypt(decryptOptions, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<DecryptResult> decrypt(DecryptOptions decryptOptions, Context context) {
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.decrypt(decryptOptions, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.DECRYPT)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Decrypt operation is not allowed for key with id: %s", key.getId()))));
}
return localKeyCryptographyClient.decryptAsync(decryptOptions, context, key);
});
}
/**
* Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and
* symmetric keys. This operation requires the {@code keys/sign} permission for non-local operations.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the
* signature from the digest. Possible values include:
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
     * <p>Signs the digest. Subscribes to the call asynchronously and prints out the signature details when a response
* has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.sign
*
* @param algorithm The algorithm to use for signing.
* @param digest The content from which signature is to be created.
*
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult
* the created signature.
*
* @throws NullPointerException If {@code algorithm} or {@code digest} is {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for signing.
* @throws UnsupportedOperationException If the sign operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) {
try {
return withContext(context -> sign(algorithm, digest, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) {
Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
Objects.requireNonNull(digest, "Digest content to be signed cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.sign(algorithm, digest, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Sign operation is not allowed for key with id: %s", key.getId()))));
}
return localKeyCryptographyClient.signAsync(algorithm, digest, context, key);
});
}
/**
* Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric
* keys. In case of asymmetric keys public portion of the key is used to verify the signature. This operation
* requires the {@code keys/verify} permission for non-local operations.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
* signature. Possible values include: {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the
* verification details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verify
*
* @param algorithm The algorithm to use for signing.
* @param digest The content from which signature was created.
* @param signature The signature to be verified.
*
* @return A {@link Mono} containing a {@link VerifyResult}
* {@link VerifyResult
*
* @throws NullPointerException If {@code algorithm}, {@code digest} or {@code signature} is {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for verifying.
* @throws UnsupportedOperationException If the verify operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) {
try {
return withContext(context -> verify(algorithm, digest, signature, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) {
Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
Objects.requireNonNull(digest, "Digest content cannot be null.");
Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.verify(algorithm, digest, signature, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Verify operation is not allowed for key with id: %s", key.getId()))));
}
return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key);
});
}
/**
* Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both
* symmetric and asymmetric keys. This operation requires the {@code keys/wrapKey} permission for non-local
* operations.
*
* <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified
* key content. Possible values include:
* {@link KeyWrapAlgorithm
* {@link KeyWrapAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Wraps the key content. Subscribes to the call asynchronously and prints out the wrapped key details when a
* response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.wrapKey
*
* @param algorithm The encryption algorithm to use for wrapping the key.
* @param key The key content to be wrapped.
*
* @return A {@link Mono} containing a {@link WrapResult} whose {@link WrapResult
* contains the wrapped key result.
*
* @throws NullPointerException If {@code algorithm} or {@code key} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for wrap operation.
* @throws UnsupportedOperationException If the wrap operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) {
try {
return withContext(context -> wrapKey(algorithm, key, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) {
Objects.requireNonNull(algorithm, "Key wrap algorithm cannot be null.");
Objects.requireNonNull(key, "Key content to be wrapped cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.wrapKey(algorithm, key, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.WRAP_KEY)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Wrap Key operation is not allowed for key with id: %s", this.key.getId()))));
}
return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key);
});
}
/**
* Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation
* is the reverse of the wrap operation. The unwrap operation supports asymmetric and symmetric keys to unwrap. This
* operation requires the {@code keys/unwrapKey} permission for non-local operations.
*
* <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the
* specified encrypted key content. Possible values for asymmetric keys include:
* {@link KeyWrapAlgorithm
* {@link KeyWrapAlgorithm
*
* Possible values for symmetric keys include: {@link KeyWrapAlgorithm
* {@link KeyWrapAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when
* a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.unwrapKey
*
* @param algorithm The encryption algorithm to use for wrapping the key.
* @param encryptedKey The encrypted key content to unwrap.
*
* @return A {@link Mono} containing an {@link UnwrapResult} whose {@link UnwrapResult
* key} contains the unwrapped key result.
*
* @throws NullPointerException If {@code algorithm} or {@code encryptedKey} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for wrap operation.
* @throws UnsupportedOperationException If the unwrap operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) {
try {
return withContext(context -> unwrapKey(algorithm, encryptedKey, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) {
Objects.requireNonNull(algorithm, "Key wrap algorithm cannot be null.");
Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.UNWRAP_KEY)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Unwrap Key operation is not allowed for key with id: %s", this.key.getId()))));
}
return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key);
});
}
/**
* Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric
* and symmetric keys. This operation requires the {@code keys/sign} permission for non-local operations.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest.
* Possible values include:
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a
* response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.signData
*
* @param algorithm The algorithm to use for signing.
* @param data The content from which signature is to be created.
*
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult
* the created signature.
*
* @throws NullPointerException If {@code algorithm} or {@code data} is {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for signing.
* @throws UnsupportedOperationException If the sign operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) {
try {
return withContext(context -> signData(algorithm, data, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) {
Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
Objects.requireNonNull(data, "Data to be signed cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.signData(algorithm, data, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Sign Operation is not allowed for key with id: %s", this.key.getId()))));
}
return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key);
});
}
/**
* Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric
* keys and asymmetric keys. In case of asymmetric keys public portion of the key is used to verify the signature.
* This operation requires the {@code keys/verify} permission for non-local operations.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
* signature. Possible values include:
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Verifies the signature against the raw data. Subscribes to the call asynchronously and prints out the
* verification details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verifyData
*
* @param algorithm The algorithm to use for signing.
* @param data The raw content against which signature is to be verified.
* @param signature The signature to be verified.
*
* @return A {@link Mono} containing a {@link VerifyResult}
* {@link VerifyResult
*
* @throws NullPointerException If {@code algorithm}, {@code data} or {@code signature} is {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for verifying.
* @throws UnsupportedOperationException If the verify operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) {
try {
return withContext(context -> verifyData(algorithm, data, signature, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) {
Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
Objects.requireNonNull(data, "Data cannot be null.");
Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.verifyData(algorithm, data, signature, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Verify operation is not allowed for key with id: %s", this.key.getId()))));
}
return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key);
});
}
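    // --- Illustrative usage sketch (not part of the original source). Pairs with the signing sketch above:
    // --- verifies raw data against a previously created signature; 'cryptoClient' is hypothetical and the
    // --- algorithm must match the one used for signing.
    @SuppressWarnings("unused")
    private static void verifyDataUsageSketch(CryptographyAsyncClient cryptoClient, byte[] data, byte[] signature) {
        cryptoClient.verifyData(SignatureAlgorithm.RS256, data, signature)
            .subscribe(verifyResult -> System.out.printf("Signature valid: %b%n", verifyResult.isValid()));
    }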
private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) {
return operations.contains(keyOperation);
}
private Mono<Boolean> ensureValidKeyAvailable() {
boolean keyNotAvailable = (key == null && keyCollection != null);
boolean keyNotValid = (key != null && !key.isValid());
if (keyNotAvailable || keyNotValid) {
if (keyCollection.equals(SECRETS_COLLECTION)) {
return getSecretKey().map(jsonWebKey -> {
key = (jsonWebKey);
if (key.isValid()) {
initializeCryptoClients();
return true;
} else {
return false;
}
});
} else {
return getKey().map(keyVaultKey -> {
key = (keyVaultKey.getKey());
if (key.isValid()) {
initializeCryptoClients();
return true;
} else {
return false;
}
});
}
} else {
return Mono.defer(() -> Mono.just(true));
}
}
CryptographyServiceClient getCryptographyServiceClient() {
return cryptographyServiceClient;
}
void setCryptographyServiceClient(CryptographyServiceClient serviceClient) {
this.cryptographyServiceClient = serviceClient;
}
} | class CryptographyAsyncClient {
static final String KEY_VAULT_SCOPE = "https:
static final String SECRETS_COLLECTION = "secrets";
static final String KEYVAULT_TRACING_NAMESPACE_VALUE = "Microsoft.KeyVault";
JsonWebKey key;
private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class);
private final CryptographyService service;
private final HttpPipeline pipeline;
private final String keyId;
private CryptographyServiceClient cryptographyServiceClient;
private LocalKeyCryptographyClient localKeyCryptographyClient;
private String keyCollection;
/**
* Creates a {@link CryptographyAsyncClient} that uses a given {@link HttpPipeline pipeline} to service requests.
*
* @param keyId The Azure Key Vault key identifier to use for cryptography operations.
* @param pipeline {@link HttpPipeline} that the HTTP requests and responses flow through.
* @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
*/
CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) {
unpackAndValidateId(keyId);
this.keyId = keyId;
this.pipeline = pipeline;
this.service = RestProxy.create(CryptographyService.class, pipeline);
this.cryptographyServiceClient = new CryptographyServiceClient(keyId, service, version);
this.key = null;
}
/**
* Creates a {@link CryptographyAsyncClient} that uses a {@link JsonWebKey} to perform local cryptography
* operations.
*
* @param jsonWebKey The {@link JsonWebKey} to use for local cryptography operations.
*/
CryptographyAsyncClient(JsonWebKey jsonWebKey) {
Objects.requireNonNull(jsonWebKey, "The JSON Web Key is required.");
if (!jsonWebKey.isValid()) {
throw new IllegalArgumentException("The JSON Web Key is not valid.");
}
if (jsonWebKey.getKeyOps() == null) {
throw new IllegalArgumentException("The JSON Web Key's key operations property is not configured.");
}
if (jsonWebKey.getKeyType() == null) {
throw new IllegalArgumentException("The JSON Web Key's key type property is not configured.");
}
this.key = jsonWebKey;
this.keyId = jsonWebKey.getId();
this.pipeline = null;
this.service = null;
this.cryptographyServiceClient = null;
initializeCryptoClients();
}
private void initializeCryptoClients() {
if (localKeyCryptographyClient != null) {
return;
}
if (key.getKeyType().equals(RSA) || key.getKeyType().equals(RSA_HSM)) {
this.localKeyCryptographyClient = new RsaKeyCryptographyClient(this.key, this.cryptographyServiceClient);
} else if (key.getKeyType().equals(EC) || key.getKeyType().equals(EC_HSM)) {
this.localKeyCryptographyClient = new EcKeyCryptographyClient(this.key, this.cryptographyServiceClient);
} else if (key.getKeyType().equals(OCT) || key.getKeyType().equals(OCT_HSM)) {
this.localKeyCryptographyClient = new AesKeyCryptographyClient(this.key, this.cryptographyServiceClient);
} else {
throw logger.logExceptionAsError(new IllegalArgumentException(String.format(
"The JSON Web Key type: %s is not supported.", this.key.getKeyType().toString())));
}
}
/**
* Gets the {@link HttpPipeline} powering this client.
*
* @return The pipeline.
*/
HttpPipeline getHttpPipeline() {
return this.pipeline;
}
Mono<String> getKeyId() {
return Mono.defer(() -> Mono.just(this.keyId));
}
/**
* Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
* the {@code keys/get} permission for non-local operations.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.getKey}
*
* @return A {@link Mono} containing the requested {@link KeyVaultKey key}.
*
* @throws ResourceNotFoundException When the configured key doesn't exist in the key vault.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<KeyVaultKey> getKey() {
try {
return getKeyWithResponse().flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
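    // --- Illustrative usage sketch (not part of the original source). Retrieves the public key material the
    // --- client is configured with; 'cryptoClient' is hypothetical and must be service-backed (not local-only).
    @SuppressWarnings("unused")
    private static void getKeyUsageSketch(CryptographyAsyncClient cryptoClient) {
        cryptoClient.getKey()
            .subscribe(keyVaultKey -> System.out.printf("Retrieved key '%s' of type %s.%n",
                keyVaultKey.getName(), keyVaultKey.getKeyType()));
    }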
/**
* Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
* the {@code keys/get} permission for non-local operations.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.getKeyWithResponse}
*
* @return A {@link Mono} containing a {@link Response} whose {@link Response
* requested {@link KeyVaultKey key}.
*
* @throws ResourceNotFoundException When the configured key doesn't exist in the key vault.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<KeyVaultKey>> getKeyWithResponse() {
try {
return withContext(this::getKeyWithResponse);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) {
if (cryptographyServiceClient != null) {
return cryptographyServiceClient.getKey(context);
} else {
throw logger.logExceptionAsError(new UnsupportedOperationException(
"Operation not supported when in operating local-only mode"));
}
}
Mono<JsonWebKey> getSecretKey() {
try {
return withContext(context -> cryptographyServiceClient.getSecretKey(context))
.flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports
* a single block of data, the size of which is dependent on the target key and the encryption algorithm to be used.
* The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys, the
* public portion of the key is used for encryption. This operation requires the {@code keys/encrypt} permission
* for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the
* specified {@code plaintext}. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when
* a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt
*
* @param algorithm The algorithm to be used for encryption.
* @param plaintext The content to be encrypted.
*
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult
* contains the encrypted content.
*
* @throws NullPointerException If {@code algorithm} or {@code plaintext} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for encryption.
* @throws UnsupportedOperationException If the encrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) {
return encrypt(new EncryptParameters(algorithm, plaintext, null, null), null);
}
/**
* Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports
* a single block of data, the size of which is dependent on the target key and the encryption algorithm to be used.
* The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys, the
* public portion of the key is used for encryption. This operation requires the {@code keys/encrypt} permission
* for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the
* specified {@code plaintext}. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when
* a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt
*
* @param encryptParameters The parameters to use in the encryption operation.
*
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult
* contains the encrypted content.
*
* @throws NullPointerException If {@code algorithm} or {@code plaintext} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for encryption.
* @throws UnsupportedOperationException If the encrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<EncryptResult> encrypt(EncryptParameters encryptParameters) {
Objects.requireNonNull(encryptParameters, "'encryptParameters' cannot be null.");
try {
return withContext(context -> encrypt(encryptParameters, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<EncryptResult> encrypt(EncryptParameters encryptParameters, Context context) {
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.encrypt(encryptParameters, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.ENCRYPT)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Encrypt operation is missing permission/not supported for key with id: %s", key.getId()))));
}
return localKeyCryptographyClient.encryptAsync(encryptParameters, context, key);
});
}
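    // --- Illustrative usage sketch (not part of the original source). Encrypts a single block of plaintext
    // --- with the simple overload; 'cryptoClient' and the RSA-OAEP choice (an RSA key is assumed) are hypothetical.
    @SuppressWarnings("unused")
    private static void encryptUsageSketch(CryptographyAsyncClient cryptoClient, byte[] plaintext) {
        cryptoClient.encrypt(EncryptionAlgorithm.RSA_OAEP, plaintext)
            .subscribe(encryptResult -> System.out.printf("Produced %d bytes of ciphertext.%n",
                encryptResult.getCipherText().length));
    }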
/**
* Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a
* single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm
* to be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires
* the {@code keys/decrypt} permission for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting
* the specified encrypted content. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt
*
* @param algorithm The algorithm to be used for decryption.
* @param ciphertext The content to be decrypted.
*
* @return A {@link Mono} containing the decrypted blob.
*
* @throws NullPointerException If {@code algorithm} or {@code ciphertext} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for decryption.
* @throws UnsupportedOperationException If the decrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] ciphertext) {
return decrypt(new DecryptParameters(algorithm, ciphertext, null, null, null));
}
/**
* Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a
* single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm
* to be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires
* the {@code keys/decrypt} permission for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting
* the specified encrypted content. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt
*
* @param decryptParameters The parameters to use in the decryption operation.
*
* @return A {@link Mono} containing the decrypted blob.
*
* @throws NullPointerException If {@code algorithm} or {@code ciphertext} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for decryption.
* @throws UnsupportedOperationException If the decrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DecryptResult> decrypt(DecryptParameters decryptParameters) {
Objects.requireNonNull(decryptParameters, "'decryptParameters' cannot be null.");
try {
return withContext(context -> decrypt(decryptParameters, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<DecryptResult> decrypt(DecryptParameters decryptParameters, Context context) {
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.decrypt(decryptParameters, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.DECRYPT)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Decrypt operation is not allowed for key with id: %s", key.getId()))));
}
return localKeyCryptographyClient.decryptAsync(decryptParameters, context, key);
});
}
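    // --- Illustrative usage sketch (not part of the original source). Decrypts ciphertext produced with the
    // --- matching algorithm; 'cryptoClient' is hypothetical and needs the keys/decrypt permission for
    // --- service-side calls.
    @SuppressWarnings("unused")
    private static void decryptUsageSketch(CryptographyAsyncClient cryptoClient, byte[] ciphertext) {
        cryptoClient.decrypt(EncryptionAlgorithm.RSA_OAEP, ciphertext)
            .subscribe(decryptResult -> System.out.printf("Recovered %d bytes of plaintext.%n",
                decryptResult.getPlainText().length));
    }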
/**
* Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and
* symmetric keys. This operation requires the {@code keys/sign} permission for non-local operations.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the
* signature from the digest. Possible values include:
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
     * <p>Signs the digest. Subscribes to the call asynchronously and prints out the signature details when a response
* has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.sign
*
* @param algorithm The algorithm to use for signing.
* @param digest The content from which signature is to be created.
*
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult
* the created signature.
*
* @throws NullPointerException If {@code algorithm} or {@code digest} is {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for signing.
* @throws UnsupportedOperationException If the sign operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) {
try {
return withContext(context -> sign(algorithm, digest, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) {
Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
Objects.requireNonNull(digest, "Digest content to be signed cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.sign(algorithm, digest, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Sign operation is not allowed for key with id: %s", key.getId()))));
}
return localKeyCryptographyClient.signAsync(algorithm, digest, context, key);
});
}
/**
* Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric
     * keys. In case of asymmetric keys, the public portion of the key is used to verify the signature. This operation
* requires the {@code keys/verify} permission for non-local operations.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
* signature. Possible values include: {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the
* verification details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verify
*
     * @param algorithm The algorithm to use for verifying.
* @param digest The content from which signature was created.
* @param signature The signature to be verified.
*
* @return A {@link Mono} containing a {@link VerifyResult}
* {@link VerifyResult
*
* @throws NullPointerException If {@code algorithm}, {@code digest} or {@code signature} is {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for verifying.
* @throws UnsupportedOperationException If the verify operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) {
try {
return withContext(context -> verify(algorithm, digest, signature, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) {
Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
Objects.requireNonNull(digest, "Digest content cannot be null.");
Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.verify(algorithm, digest, signature, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Verify operation is not allowed for key with id: %s", key.getId()))));
}
return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key);
});
}
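    // --- Illustrative usage sketch (not part of the original source). Chains sign and verify over a
    // --- precomputed digest; 'cryptoClient' and the ES256 choice (an EC P-256 key and a SHA-256 digest are
    // --- assumed) are hypothetical.
    @SuppressWarnings("unused")
    private static void signAndVerifyDigestSketch(CryptographyAsyncClient cryptoClient, byte[] digest) {
        cryptoClient.sign(SignatureAlgorithm.ES256, digest)
            .flatMap(signResult -> cryptoClient.verify(SignatureAlgorithm.ES256, digest, signResult.getSignature()))
            .subscribe(verifyResult -> System.out.printf("Round-trip signature valid: %b%n", verifyResult.isValid()));
    }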
/**
* Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both
* symmetric and asymmetric keys. This operation requires the {@code keys/wrapKey} permission for non-local
* operations.
*
* <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified
* key content. Possible values include:
* {@link KeyWrapAlgorithm
* {@link KeyWrapAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Wraps the key content. Subscribes to the call asynchronously and prints out the wrapped key details when a
* response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.wrapKey
*
* @param algorithm The encryption algorithm to use for wrapping the key.
* @param key The key content to be wrapped.
*
* @return A {@link Mono} containing a {@link WrapResult} whose {@link WrapResult
* contains the wrapped key result.
*
* @throws NullPointerException If {@code algorithm} or {@code key} are {@code null}.
     * @throws ResourceNotFoundException If the key cannot be found for the wrap operation.
* @throws UnsupportedOperationException If the wrap operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) {
try {
return withContext(context -> wrapKey(algorithm, key, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) {
Objects.requireNonNull(algorithm, "Key wrap algorithm cannot be null.");
Objects.requireNonNull(key, "Key content to be wrapped cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.wrapKey(algorithm, key, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.WRAP_KEY)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Wrap Key operation is not allowed for key with id: %s", this.key.getId()))));
}
return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key);
});
}
/**
* Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation
* is the reverse of the wrap operation. The unwrap operation supports asymmetric and symmetric keys to unwrap. This
* operation requires the {@code keys/unwrapKey} permission for non-local operations.
*
* <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the
* specified encrypted key content. Possible values for asymmetric keys include:
* {@link KeyWrapAlgorithm
* {@link KeyWrapAlgorithm
*
* Possible values for symmetric keys include: {@link KeyWrapAlgorithm
* {@link KeyWrapAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when
* a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.unwrapKey
*
     * @param algorithm The encryption algorithm to use for unwrapping the key.
* @param encryptedKey The encrypted key content to unwrap.
*
* @return A {@link Mono} containing an {@link UnwrapResult} whose {@link UnwrapResult
* key} contains the unwrapped key result.
*
* @throws NullPointerException If {@code algorithm} or {@code encryptedKey} are {@code null}.
     * @throws ResourceNotFoundException If the key cannot be found for the unwrap operation.
* @throws UnsupportedOperationException If the unwrap operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) {
try {
return withContext(context -> unwrapKey(algorithm, encryptedKey, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) {
Objects.requireNonNull(algorithm, "Key wrap algorithm cannot be null.");
Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.UNWRAP_KEY)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Unwrap Key operation is not allowed for key with id: %s", this.key.getId()))));
}
return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key);
});
}
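    // --- Illustrative usage sketch (not part of the original source). Wraps symmetric key material and then
    // --- unwraps it again; 'cryptoClient' and the RSA-OAEP choice (an RSA wrapping key is assumed) are hypothetical.
    @SuppressWarnings("unused")
    private static void wrapAndUnwrapKeySketch(CryptographyAsyncClient cryptoClient, byte[] keyMaterial) {
        cryptoClient.wrapKey(KeyWrapAlgorithm.RSA_OAEP, keyMaterial)
            .flatMap(wrapResult -> cryptoClient.unwrapKey(KeyWrapAlgorithm.RSA_OAEP, wrapResult.getEncryptedKey()))
            .subscribe(unwrapResult -> System.out.printf("Unwrapped %d bytes of key material.%n",
                unwrapResult.getKey().length));
    }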
/**
* Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric
* and symmetric keys. This operation requires the {@code keys/sign} permission for non-local operations.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest.
* Possible values include:
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a
* response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.signData
*
* @param algorithm The algorithm to use for signing.
* @param data The content from which signature is to be created.
*
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult
* the created signature.
*
* @throws NullPointerException If {@code algorithm} or {@code data} is {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for signing.
* @throws UnsupportedOperationException If the sign operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) {
try {
return withContext(context -> signData(algorithm, data, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) {
Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
Objects.requireNonNull(data, "Data to be signed cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.signData(algorithm, data, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Sign Operation is not allowed for key with id: %s", this.key.getId()))));
}
return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key);
});
}
/**
* Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric
     * keys and asymmetric keys. In case of asymmetric keys, the public portion of the key is used to verify the signature.
* This operation requires the {@code keys/verify} permission for non-local operations.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
* signature. Possible values include:
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Verifies the signature against the raw data. Subscribes to the call asynchronously and prints out the
* verification details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verifyData
*
     * @param algorithm The algorithm to use for verifying.
* @param data The raw content against which signature is to be verified.
* @param signature The signature to be verified.
*
* @return A {@link Mono} containing a {@link VerifyResult}
* {@link VerifyResult
*
* @throws NullPointerException If {@code algorithm}, {@code data} or {@code signature} is {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for verifying.
* @throws UnsupportedOperationException If the verify operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) {
try {
return withContext(context -> verifyData(algorithm, data, signature, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) {
Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
Objects.requireNonNull(data, "Data cannot be null.");
Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.verifyData(algorithm, data, signature, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Verify operation is not allowed for key with id: %s", this.key.getId()))));
}
return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key);
});
}
private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) {
return operations.contains(keyOperation);
}
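    // Lazily retrieves the key material from the service the first time it is needed (or when the cached key is
    // not valid): keys backing a secret are fetched from the secrets collection, all others via getKey(). A valid
    // key initializes the local crypto clients and the method emits true; otherwise it emits false so callers
    // fall back to the service-side cryptography client.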
private Mono<Boolean> ensureValidKeyAvailable() {
boolean keyNotAvailable = (key == null && keyCollection != null);
boolean keyNotValid = (key != null && !key.isValid());
if (keyNotAvailable || keyNotValid) {
if (keyCollection.equals(SECRETS_COLLECTION)) {
return getSecretKey().map(jsonWebKey -> {
key = (jsonWebKey);
if (key.isValid()) {
initializeCryptoClients();
return true;
} else {
return false;
}
});
} else {
return getKey().map(keyVaultKey -> {
key = (keyVaultKey.getKey());
if (key.isValid()) {
initializeCryptoClients();
return true;
} else {
return false;
}
});
}
} else {
return Mono.defer(() -> Mono.just(true));
}
}
CryptographyServiceClient getCryptographyServiceClient() {
return cryptographyServiceClient;
}
void setCryptographyServiceClient(CryptographyServiceClient serviceClient) {
this.cryptographyServiceClient = serviceClient;
}
} |
How about assertThat(taskError).withFailMessage("xxx").isNull()? This class has 3 similar code, LeftOuterJoinTaskTest has 2, and RightOuterJoinTaskTest has 2, please update them if make sense to you. | void testCancelOuterJoinTaskWhileSort1() throws Exception {
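        // Sketch of the suggested assertion (the message text here is a placeholder), replacing the
        // null-check-plus-fail pattern used below:
        //     assertThat(error.get())
        //             .withFailMessage("Error in task while canceling")
        //             .isNull();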
setOutput(new DiscardingOutputCollector<Tuple2<Integer, Integer>>());
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(this.bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInputSorted(
new DelayingIterator<>(new InfiniteIntTupleIterator(), 100),
this.serializer,
this.comparator1.duplicate());
addInput(new DelayingIterator<>(new InfiniteIntTupleIterator(), 100), this.serializer);
final AtomicReference<Throwable> error = new AtomicReference<>();
final Thread taskRunner =
new Thread("Task runner for testCancelOuterJoinTaskWhileSort1()") {
@Override
public void run() {
try {
testDriver(testTask, MockJoinStub.class);
} catch (Throwable t) {
error.set(t);
}
}
};
taskRunner.start();
Thread.sleep(1000);
cancel();
taskRunner.interrupt();
taskRunner.join(60000);
assertThat(taskRunner.isAlive())
.withFailMessage("Task thread did not finish within 60 seconds")
.isFalse();
final Throwable taskError = error.get();
if (taskError != null) {
fail("Error in task while canceling:\n" + Throwables.getStackTraceAsString(taskError));
}
} | fail("Error in task while canceling:\n" + Throwables.getStackTraceAsString(taskError)); | void testCancelOuterJoinTaskWhileSort1() throws Exception {
setOutput(new DiscardingOutputCollector<Tuple2<Integer, Integer>>());
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(this.bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInputSorted(
new DelayingIterator<>(new InfiniteIntTupleIterator(), 100),
this.serializer,
this.comparator1.duplicate());
addInput(new DelayingIterator<>(new InfiniteIntTupleIterator(), 100), this.serializer);
final AtomicReference<Throwable> error = new AtomicReference<>();
final Thread taskRunner =
new Thread("Task runner for testCancelOuterJoinTaskWhileSort1()") {
@Override
public void run() {
try {
testDriver(testTask, MockJoinStub.class);
} catch (Throwable t) {
error.set(t);
}
}
};
taskRunner.start();
Thread.sleep(1000);
cancel();
taskRunner.interrupt();
taskRunner.join(60000);
assertThat(taskRunner.isAlive())
.withFailMessage("Task thread did not finish within 60 seconds")
.isFalse();
assertThat(error.get()).isNull();
} | class AbstractOuterJoinTaskTest
extends BinaryOperatorTestBase<
FlatJoinFunction<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>> {
private static final long HASH_MEM = 6 * 1024 * 1024;
private static final long SORT_MEM = 3 * 1024 * 1024;
private static final int NUM_SORTER = 2;
private static final long BNLJN_MEM = 10 * PAGE_SIZE;
private final double bnljn_frac;
@SuppressWarnings("unchecked")
protected final TypeComparator<Tuple2<Integer, Integer>> comparator1 =
new TupleComparator<>(
new int[] {0},
new TypeComparator<?>[] {new IntComparator(true)},
new TypeSerializer<?>[] {IntSerializer.INSTANCE});
@SuppressWarnings("unchecked")
protected final TypeComparator<Tuple2<Integer, Integer>> comparator2 =
new TupleComparator<>(
new int[] {0},
new TypeComparator<?>[] {new IntComparator(true)},
new TypeSerializer<?>[] {IntSerializer.INSTANCE});
protected final List<Tuple2<Integer, Integer>> outList = new ArrayList<>();
@SuppressWarnings("unchecked")
protected final TypeSerializer<Tuple2<Integer, Integer>> serializer =
new TupleSerializer<>(
(Class<Tuple2<Integer, Integer>>) (Class<?>) Tuple2.class,
new TypeSerializer<?>[] {IntSerializer.INSTANCE, IntSerializer.INSTANCE});
AbstractOuterJoinTaskTest(ExecutionConfig config) {
super(config, HASH_MEM, NUM_SORTER, SORT_MEM);
bnljn_frac = (double) BNLJN_MEM / this.getMemoryManager().getMemorySize();
}
@TestTemplate
void testSortBoth1OuterJoinTask() throws Exception {
final int keyCnt1 = 20;
final int valCnt1 = 1;
final int keyCnt2 = 10;
final int valCnt2 = 2;
testSortBothOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2);
}
@TestTemplate
void testSortBoth2OuterJoinTask() throws Exception {
final int keyCnt1 = 20;
final int valCnt1 = 1;
final int keyCnt2 = 20;
final int valCnt2 = 1;
testSortBothOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2);
}
@TestTemplate
void testSortBoth3OuterJoinTask() throws Exception {
int keyCnt1 = 20;
int valCnt1 = 1;
int keyCnt2 = 20;
int valCnt2 = 20;
testSortBothOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2);
}
@TestTemplate
void testSortBoth4OuterJoinTask() throws Exception {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 1;
testSortBothOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2);
}
@TestTemplate
void testSortBoth5OuterJoinTask() throws Exception {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 20;
testSortBothOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2);
}
@TestTemplate
void testSortBoth6OuterJoinTask() throws Exception {
int keyCnt1 = 10;
int valCnt1 = 1;
int keyCnt2 = 20;
int valCnt2 = 2;
testSortBothOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2);
}
private void testSortBothOuterJoinTask(int keyCnt1, int valCnt1, int keyCnt2, int valCnt2)
throws Exception {
setOutput(this.outList, this.serializer);
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(this.bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInputSorted(
new UniformIntTupleGenerator(keyCnt1, valCnt1, false),
this.serializer,
this.comparator1.duplicate());
addInputSorted(
new UniformIntTupleGenerator(keyCnt2, valCnt2, false),
this.serializer,
this.comparator2.duplicate());
testDriver(testTask, MockJoinStub.class);
final int expCnt = calculateExpectedCount(keyCnt1, valCnt1, keyCnt2, valCnt2);
assertThat(this.outList)
.withFailMessage("Result set size was %d. Expected was %d", outList.size(), expCnt)
.hasSize(expCnt);
this.outList.clear();
}
@TestTemplate
void testSortFirstOuterJoinTask() throws Exception {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 20;
setOutput(this.outList, this.serializer);
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(this.bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInputSorted(
new UniformIntTupleGenerator(keyCnt1, valCnt1, false),
this.serializer,
this.comparator1.duplicate());
addInput(new UniformIntTupleGenerator(keyCnt2, valCnt2, true), this.serializer);
testDriver(testTask, MockJoinStub.class);
final int expCnt = calculateExpectedCount(keyCnt1, valCnt1, keyCnt2, valCnt2);
assertThat(this.outList)
.withFailMessage("Result set size was %d. Expected was %d", outList.size(), expCnt)
.hasSize(expCnt);
this.outList.clear();
}
@TestTemplate
void testSortSecondOuterJoinTask() throws Exception {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 20;
setOutput(this.outList, this.serializer);
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(this.bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInput(new UniformIntTupleGenerator(keyCnt1, valCnt1, true), this.serializer);
addInputSorted(
new UniformIntTupleGenerator(keyCnt2, valCnt2, false),
this.serializer,
this.comparator2.duplicate());
testDriver(testTask, MockJoinStub.class);
final int expCnt = calculateExpectedCount(keyCnt1, valCnt1, keyCnt2, valCnt2);
assertThat(this.outList)
.withFailMessage("Result set size was %d. Expected was %d", outList.size(), expCnt)
.hasSize(expCnt);
this.outList.clear();
}
@TestTemplate
void testMergeOuterJoinTask() throws Exception {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 20;
setOutput(this.outList, this.serializer);
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(this.bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInput(new UniformIntTupleGenerator(keyCnt1, valCnt1, true), this.serializer);
addInput(new UniformIntTupleGenerator(keyCnt2, valCnt2, true), this.serializer);
testDriver(testTask, MockJoinStub.class);
final int expCnt = calculateExpectedCount(keyCnt1, valCnt1, keyCnt2, valCnt2);
assertThat(this.outList)
.withFailMessage("Result set size was %d. Expected was %d", outList.size(), expCnt)
.hasSize(expCnt);
this.outList.clear();
}
@TestTemplate
void testFailingOuterJoinTask() throws Exception {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 20;
setOutput(new DiscardingOutputCollector<Tuple2<Integer, Integer>>());
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(this.bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInput(new UniformIntTupleGenerator(keyCnt1, valCnt1, true), this.serializer);
addInput(new UniformIntTupleGenerator(keyCnt2, valCnt2, true), this.serializer);
assertThatThrownBy(() -> testDriver(testTask, MockFailingJoinStub.class))
.isInstanceOf(ExpectedTestException.class);
}
    @TestTemplate
void testCancelOuterJoinTaskWhileSort2() throws Exception {
setOutput(new DiscardingOutputCollector<Tuple2<Integer, Integer>>());
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(this.bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInput(new DelayingIterator<>(new InfiniteIntTupleIterator(), 1), this.serializer);
addInputSorted(
new DelayingIterator<>(new InfiniteIntTupleIterator(), 1),
this.serializer,
this.comparator2.duplicate());
final AtomicReference<Throwable> error = new AtomicReference<>();
final Thread taskRunner =
new Thread("Task runner for testCancelOuterJoinTaskWhileSort2()") {
@Override
public void run() {
try {
testDriver(testTask, MockJoinStub.class);
} catch (Throwable t) {
error.set(t);
}
}
};
taskRunner.start();
Thread.sleep(1000);
cancel();
taskRunner.interrupt();
taskRunner.join(60000);
assertThat(taskRunner.isAlive())
.withFailMessage("Task thread did not finish within 60 seconds")
.isFalse();
final Throwable taskError = error.get();
if (taskError != null) {
fail("Error in task while canceling:\n" + Throwables.getStackTraceAsString(taskError));
}
}
@TestTemplate
void testCancelOuterJoinTaskWhileRunning() throws Exception {
setOutput(new DiscardingOutputCollector<Tuple2<Integer, Integer>>());
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInput(new DelayingIterator<>(new InfiniteIntTupleIterator(), 100), this.serializer);
addInput(new DelayingIterator<>(new InfiniteIntTupleIterator(), 100), this.serializer);
final AtomicReference<Throwable> error = new AtomicReference<>();
final Thread taskRunner =
new Thread("Task runner for testCancelOuterJoinTaskWhileRunning()") {
@Override
public void run() {
try {
testDriver(testTask, MockJoinStub.class);
} catch (Throwable t) {
error.set(t);
}
}
};
taskRunner.start();
Thread.sleep(1000);
cancel();
taskRunner.interrupt();
taskRunner.join(60000);
assertThat(taskRunner.isAlive())
.withFailMessage("Task thread did not finish within 60 seconds")
.isFalse();
final Throwable taskError = error.get();
if (taskError != null) {
fail("Error in task while canceling:\n" + Throwables.getStackTraceAsString(taskError));
}
}
protected abstract AbstractOuterJoinDriver<
Tuple2<Integer, Integer>, Tuple2<Integer, Integer>, Tuple2<Integer, Integer>>
getOuterJoinDriver();
protected abstract int calculateExpectedCount(
int keyCnt1, int valCnt1, int keyCnt2, int valCnt2);
protected abstract DriverStrategy getSortDriverStrategy();
@SuppressWarnings("serial")
public static final class MockJoinStub
implements FlatJoinFunction<
Tuple2<Integer, Integer>, Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {
@Override
public void join(
Tuple2<Integer, Integer> first,
Tuple2<Integer, Integer> second,
Collector<Tuple2<Integer, Integer>> out)
throws Exception {
out.collect(first != null ? first : second);
}
}
@SuppressWarnings("serial")
public static final class MockFailingJoinStub
implements FlatJoinFunction<
Tuple2<Integer, Integer>, Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {
private int cnt = 0;
@Override
public void join(
Tuple2<Integer, Integer> first,
Tuple2<Integer, Integer> second,
Collector<Tuple2<Integer, Integer>> out)
throws Exception {
if (++this.cnt >= 10) {
throw new ExpectedTestException();
}
out.collect(first != null ? first : second);
}
}
} | class AbstractOuterJoinTaskTest
extends BinaryOperatorTestBase<
FlatJoinFunction<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>> {
private static final long HASH_MEM = 6 * 1024 * 1024;
private static final long SORT_MEM = 3 * 1024 * 1024;
private static final int NUM_SORTER = 2;
private static final long BNLJN_MEM = 10 * PAGE_SIZE;
private final double bnljn_frac;
@SuppressWarnings("unchecked")
protected final TypeComparator<Tuple2<Integer, Integer>> comparator1 =
new TupleComparator<>(
new int[] {0},
new TypeComparator<?>[] {new IntComparator(true)},
new TypeSerializer<?>[] {IntSerializer.INSTANCE});
@SuppressWarnings("unchecked")
protected final TypeComparator<Tuple2<Integer, Integer>> comparator2 =
new TupleComparator<>(
new int[] {0},
new TypeComparator<?>[] {new IntComparator(true)},
new TypeSerializer<?>[] {IntSerializer.INSTANCE});
protected final List<Tuple2<Integer, Integer>> outList = new ArrayList<>();
@SuppressWarnings("unchecked")
protected final TypeSerializer<Tuple2<Integer, Integer>> serializer =
new TupleSerializer<>(
(Class<Tuple2<Integer, Integer>>) (Class<?>) Tuple2.class,
new TypeSerializer<?>[] {IntSerializer.INSTANCE, IntSerializer.INSTANCE});
AbstractOuterJoinTaskTest(ExecutionConfig config) {
super(config, HASH_MEM, NUM_SORTER, SORT_MEM);
bnljn_frac = (double) BNLJN_MEM / this.getMemoryManager().getMemorySize();
}
@TestTemplate
void testSortBoth1OuterJoinTask() throws Exception {
final int keyCnt1 = 20;
final int valCnt1 = 1;
final int keyCnt2 = 10;
final int valCnt2 = 2;
testSortBothOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2);
}
@TestTemplate
void testSortBoth2OuterJoinTask() throws Exception {
final int keyCnt1 = 20;
final int valCnt1 = 1;
final int keyCnt2 = 20;
final int valCnt2 = 1;
testSortBothOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2);
}
@TestTemplate
void testSortBoth3OuterJoinTask() throws Exception {
int keyCnt1 = 20;
int valCnt1 = 1;
int keyCnt2 = 20;
int valCnt2 = 20;
testSortBothOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2);
}
@TestTemplate
void testSortBoth4OuterJoinTask() throws Exception {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 1;
testSortBothOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2);
}
@TestTemplate
void testSortBoth5OuterJoinTask() throws Exception {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 20;
testSortBothOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2);
}
@TestTemplate
void testSortBoth6OuterJoinTask() throws Exception {
int keyCnt1 = 10;
int valCnt1 = 1;
int keyCnt2 = 20;
int valCnt2 = 2;
testSortBothOuterJoinTask(keyCnt1, valCnt1, keyCnt2, valCnt2);
}
private void testSortBothOuterJoinTask(int keyCnt1, int valCnt1, int keyCnt2, int valCnt2)
throws Exception {
setOutput(this.outList, this.serializer);
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(this.bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInputSorted(
new UniformIntTupleGenerator(keyCnt1, valCnt1, false),
this.serializer,
this.comparator1.duplicate());
addInputSorted(
new UniformIntTupleGenerator(keyCnt2, valCnt2, false),
this.serializer,
this.comparator2.duplicate());
testDriver(testTask, MockJoinStub.class);
final int expCnt = calculateExpectedCount(keyCnt1, valCnt1, keyCnt2, valCnt2);
assertThat(this.outList)
.withFailMessage("Result set size was %d. Expected was %d", outList.size(), expCnt)
.hasSize(expCnt);
this.outList.clear();
}
@TestTemplate
void testSortFirstOuterJoinTask() throws Exception {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 20;
setOutput(this.outList, this.serializer);
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(this.bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInputSorted(
new UniformIntTupleGenerator(keyCnt1, valCnt1, false),
this.serializer,
this.comparator1.duplicate());
addInput(new UniformIntTupleGenerator(keyCnt2, valCnt2, true), this.serializer);
testDriver(testTask, MockJoinStub.class);
final int expCnt = calculateExpectedCount(keyCnt1, valCnt1, keyCnt2, valCnt2);
assertThat(this.outList)
.withFailMessage("Result set size was %d. Expected was %d", outList.size(), expCnt)
.hasSize(expCnt);
this.outList.clear();
}
@TestTemplate
void testSortSecondOuterJoinTask() throws Exception {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 20;
setOutput(this.outList, this.serializer);
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(this.bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInput(new UniformIntTupleGenerator(keyCnt1, valCnt1, true), this.serializer);
addInputSorted(
new UniformIntTupleGenerator(keyCnt2, valCnt2, false),
this.serializer,
this.comparator2.duplicate());
testDriver(testTask, MockJoinStub.class);
final int expCnt = calculateExpectedCount(keyCnt1, valCnt1, keyCnt2, valCnt2);
assertThat(this.outList)
.withFailMessage("Result set size was %d. Expected was %d", outList.size(), expCnt)
.hasSize(expCnt);
this.outList.clear();
}
@TestTemplate
void testMergeOuterJoinTask() throws Exception {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 20;
setOutput(this.outList, this.serializer);
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(this.bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInput(new UniformIntTupleGenerator(keyCnt1, valCnt1, true), this.serializer);
addInput(new UniformIntTupleGenerator(keyCnt2, valCnt2, true), this.serializer);
testDriver(testTask, MockJoinStub.class);
final int expCnt = calculateExpectedCount(keyCnt1, valCnt1, keyCnt2, valCnt2);
assertThat(this.outList)
.withFailMessage("Result set size was %d. Expected was %d", outList.size(), expCnt)
.hasSize(expCnt);
this.outList.clear();
}
@TestTemplate
void testFailingOuterJoinTask() {
int keyCnt1 = 20;
int valCnt1 = 20;
int keyCnt2 = 20;
int valCnt2 = 20;
setOutput(new DiscardingOutputCollector<Tuple2<Integer, Integer>>());
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(this.bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInput(new UniformIntTupleGenerator(keyCnt1, valCnt1, true), this.serializer);
addInput(new UniformIntTupleGenerator(keyCnt2, valCnt2, true), this.serializer);
assertThatThrownBy(() -> testDriver(testTask, MockFailingJoinStub.class))
.isInstanceOf(ExpectedTestException.class);
}
    @TestTemplate
void testCancelOuterJoinTaskWhileSort2() throws Exception {
setOutput(new DiscardingOutputCollector<Tuple2<Integer, Integer>>());
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(this.bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInput(new DelayingIterator<>(new InfiniteIntTupleIterator(), 1), this.serializer);
addInputSorted(
new DelayingIterator<>(new InfiniteIntTupleIterator(), 1),
this.serializer,
this.comparator2.duplicate());
final AtomicReference<Throwable> error = new AtomicReference<>();
final Thread taskRunner =
new Thread("Task runner for testCancelOuterJoinTaskWhileSort2()") {
@Override
public void run() {
try {
testDriver(testTask, MockJoinStub.class);
} catch (Throwable t) {
error.set(t);
}
}
};
taskRunner.start();
Thread.sleep(1000);
cancel();
taskRunner.interrupt();
taskRunner.join(60000);
assertThat(taskRunner.isAlive())
.withFailMessage("Task thread did not finish within 60 seconds")
.isFalse();
assertThat(error.get()).isNull();
}
@TestTemplate
void testCancelOuterJoinTaskWhileRunning() throws Exception {
setOutput(new DiscardingOutputCollector<Tuple2<Integer, Integer>>());
addDriverComparator(this.comparator1);
addDriverComparator(this.comparator2);
getTaskConfig().setDriverPairComparator(new RuntimePairComparatorFactory());
getTaskConfig().setDriverStrategy(this.getSortDriverStrategy());
getTaskConfig().setRelativeMemoryDriver(bnljn_frac);
setNumFileHandlesForSort(4);
final AbstractOuterJoinDriver<
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>,
Tuple2<Integer, Integer>>
testTask = getOuterJoinDriver();
addInput(new DelayingIterator<>(new InfiniteIntTupleIterator(), 100), this.serializer);
addInput(new DelayingIterator<>(new InfiniteIntTupleIterator(), 100), this.serializer);
final AtomicReference<Throwable> error = new AtomicReference<>();
final Thread taskRunner =
new Thread("Task runner for testCancelOuterJoinTaskWhileRunning()") {
@Override
public void run() {
try {
testDriver(testTask, MockJoinStub.class);
} catch (Throwable t) {
error.set(t);
}
}
};
taskRunner.start();
Thread.sleep(1000);
cancel();
taskRunner.interrupt();
taskRunner.join(60000);
assertThat(taskRunner.isAlive())
.withFailMessage("Task thread did not finish within 60 seconds")
.isFalse();
assertThat(error.get()).isNull();
}
protected abstract AbstractOuterJoinDriver<
Tuple2<Integer, Integer>, Tuple2<Integer, Integer>, Tuple2<Integer, Integer>>
getOuterJoinDriver();
protected abstract int calculateExpectedCount(
int keyCnt1, int valCnt1, int keyCnt2, int valCnt2);
protected abstract DriverStrategy getSortDriverStrategy();
@SuppressWarnings("serial")
public static final class MockJoinStub
implements FlatJoinFunction<
Tuple2<Integer, Integer>, Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {
@Override
public void join(
Tuple2<Integer, Integer> first,
Tuple2<Integer, Integer> second,
Collector<Tuple2<Integer, Integer>> out)
throws Exception {
out.collect(first != null ? first : second);
}
}
@SuppressWarnings("serial")
public static final class MockFailingJoinStub
implements FlatJoinFunction<
Tuple2<Integer, Integer>, Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> {
private int cnt = 0;
@Override
public void join(
Tuple2<Integer, Integer> first,
Tuple2<Integer, Integer> second,
Collector<Tuple2<Integer, Integer>> out)
throws Exception {
if (++this.cnt >= 10) {
throw new ExpectedTestException();
}
out.collect(first != null ? first : second);
}
}
} |